diff --git a/esapi/api._.go b/esapi/api._.go index 9c6107cd11..e0c025dc1f 100755 --- a/esapi/api._.go +++ b/esapi/api._.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0 (8395c37): DO NOT EDIT +// Code generated from specification version 8.14.0 (999dcb8): DO NOT EDIT package esapi @@ -57,6 +57,10 @@ type API struct { ConnectorList ConnectorList ConnectorPost ConnectorPost ConnectorPut ConnectorPut + ConnectorSecretDelete ConnectorSecretDelete + ConnectorSecretGet ConnectorSecretGet + ConnectorSecretPost ConnectorSecretPost + ConnectorSecretPut ConnectorSecretPut ConnectorSyncJobCancel ConnectorSyncJobCancel ConnectorSyncJobCheckIn ConnectorSyncJobCheckIn ConnectorSyncJobDelete ConnectorSyncJobDelete @@ -65,12 +69,17 @@ type API struct { ConnectorSyncJobList ConnectorSyncJobList ConnectorSyncJobPost ConnectorSyncJobPost ConnectorSyncJobUpdateStats ConnectorSyncJobUpdateStats + ConnectorUpdateAPIKeyDocumentID ConnectorUpdateAPIKeyDocumentID ConnectorUpdateConfiguration ConnectorUpdateConfiguration ConnectorUpdateError ConnectorUpdateError ConnectorUpdateFiltering ConnectorUpdateFiltering + ConnectorUpdateIndexName ConnectorUpdateIndexName ConnectorUpdateName ConnectorUpdateName + ConnectorUpdateNative ConnectorUpdateNative ConnectorUpdatePipeline ConnectorUpdatePipeline ConnectorUpdateScheduling ConnectorUpdateScheduling + ConnectorUpdateServiceDocumentType ConnectorUpdateServiceDocumentType + ConnectorUpdateStatus ConnectorUpdateStatus Count Count Create Create DanglingIndicesDeleteDanglingIndex DanglingIndicesDeleteDanglingIndex @@ -89,6 +98,8 @@ type API struct { EqlGet EqlGet EqlGetStatus EqlGetStatus EqlSearch EqlSearch + EsqlAsyncQueryGet EsqlAsyncQueryGet + EsqlAsyncQuery EsqlAsyncQuery EsqlQuery EsqlQuery Exists Exists ExistsSource ExistsSource @@ -125,6 +136,8 @@ type API struct { Mtermvectors Mtermvectors OpenPointInTime OpenPointInTime Ping Ping + ProfilingFlamegraph ProfilingFlamegraph + ProfilingStacktraces ProfilingStacktraces ProfilingStatus ProfilingStatus PutScript PutScript QueryRulesetDelete QueryRulesetDelete @@ -177,7 +190,10 @@ type API struct { SynonymsPutSynonymRule SynonymsPutSynonymRule TermsEnum TermsEnum Termvectors Termvectors + TextStructureFindFieldStructure TextStructureFindFieldStructure + TextStructureFindMessageStructure TextStructureFindMessageStructure TextStructureFindStructure TextStructureFindStructure + TextStructureTestGrokPattern TextStructureTestGrokPattern TransformDeleteTransform TransformDeleteTransform TransformGetTransform TransformGetTransform TransformGetTransformStats TransformGetTransformStats @@ -292,6 +308,7 @@ type Indices struct { Recovery IndicesRecovery Refresh IndicesRefresh ReloadSearchAnalyzers IndicesReloadSearchAnalyzers + ResolveCluster IndicesResolveCluster ResolveIndex IndicesResolveIndex Rollover IndicesRollover Segments IndicesSegments @@ -556,6 +573,7 @@ type Security struct { PutRole SecurityPutRole PutUser SecurityPutUser QueryAPIKeys SecurityQueryAPIKeys + QueryUser SecurityQueryUser SamlAuthenticate SecuritySamlAuthenticate SamlCompleteLogout SecuritySamlCompleteLogout SamlInvalidate SecuritySamlInvalidate @@ -624,6 +642,10 @@ func New(t Transport) *API { ConnectorList: newConnectorListFunc(t), ConnectorPost: newConnectorPostFunc(t), ConnectorPut: newConnectorPutFunc(t), + ConnectorSecretDelete: newConnectorSecretDeleteFunc(t), + ConnectorSecretGet: newConnectorSecretGetFunc(t), + ConnectorSecretPost: 
newConnectorSecretPostFunc(t), + ConnectorSecretPut: newConnectorSecretPutFunc(t), ConnectorSyncJobCancel: newConnectorSyncJobCancelFunc(t), ConnectorSyncJobCheckIn: newConnectorSyncJobCheckInFunc(t), ConnectorSyncJobDelete: newConnectorSyncJobDeleteFunc(t), @@ -632,12 +654,17 @@ func New(t Transport) *API { ConnectorSyncJobList: newConnectorSyncJobListFunc(t), ConnectorSyncJobPost: newConnectorSyncJobPostFunc(t), ConnectorSyncJobUpdateStats: newConnectorSyncJobUpdateStatsFunc(t), + ConnectorUpdateAPIKeyDocumentID: newConnectorUpdateAPIKeyDocumentIDFunc(t), ConnectorUpdateConfiguration: newConnectorUpdateConfigurationFunc(t), ConnectorUpdateError: newConnectorUpdateErrorFunc(t), ConnectorUpdateFiltering: newConnectorUpdateFilteringFunc(t), + ConnectorUpdateIndexName: newConnectorUpdateIndexNameFunc(t), ConnectorUpdateName: newConnectorUpdateNameFunc(t), + ConnectorUpdateNative: newConnectorUpdateNativeFunc(t), ConnectorUpdatePipeline: newConnectorUpdatePipelineFunc(t), ConnectorUpdateScheduling: newConnectorUpdateSchedulingFunc(t), + ConnectorUpdateServiceDocumentType: newConnectorUpdateServiceDocumentTypeFunc(t), + ConnectorUpdateStatus: newConnectorUpdateStatusFunc(t), Count: newCountFunc(t), Create: newCreateFunc(t), DanglingIndicesDeleteDanglingIndex: newDanglingIndicesDeleteDanglingIndexFunc(t), @@ -656,6 +683,8 @@ func New(t Transport) *API { EqlGet: newEqlGetFunc(t), EqlGetStatus: newEqlGetStatusFunc(t), EqlSearch: newEqlSearchFunc(t), + EsqlAsyncQueryGet: newEsqlAsyncQueryGetFunc(t), + EsqlAsyncQuery: newEsqlAsyncQueryFunc(t), EsqlQuery: newEsqlQueryFunc(t), Exists: newExistsFunc(t), ExistsSource: newExistsSourceFunc(t), @@ -692,6 +721,8 @@ func New(t Transport) *API { Mtermvectors: newMtermvectorsFunc(t), OpenPointInTime: newOpenPointInTimeFunc(t), Ping: newPingFunc(t), + ProfilingFlamegraph: newProfilingFlamegraphFunc(t), + ProfilingStacktraces: newProfilingStacktracesFunc(t), ProfilingStatus: newProfilingStatusFunc(t), PutScript: newPutScriptFunc(t), QueryRulesetDelete: newQueryRulesetDeleteFunc(t), @@ -744,7 +775,10 @@ func New(t Transport) *API { SynonymsPutSynonymRule: newSynonymsPutSynonymRuleFunc(t), TermsEnum: newTermsEnumFunc(t), Termvectors: newTermvectorsFunc(t), + TextStructureFindFieldStructure: newTextStructureFindFieldStructureFunc(t), + TextStructureFindMessageStructure: newTextStructureFindMessageStructureFunc(t), TextStructureFindStructure: newTextStructureFindStructureFunc(t), + TextStructureTestGrokPattern: newTextStructureTestGrokPatternFunc(t), TransformDeleteTransform: newTransformDeleteTransformFunc(t), TransformGetTransform: newTransformGetTransformFunc(t), TransformGetTransformStats: newTransformGetTransformStatsFunc(t), @@ -852,6 +886,7 @@ func New(t Transport) *API { Recovery: newIndicesRecoveryFunc(t), Refresh: newIndicesRefreshFunc(t), ReloadSearchAnalyzers: newIndicesReloadSearchAnalyzersFunc(t), + ResolveCluster: newIndicesResolveClusterFunc(t), ResolveIndex: newIndicesResolveIndexFunc(t), Rollover: newIndicesRolloverFunc(t), Segments: newIndicesSegmentsFunc(t), @@ -1087,6 +1122,7 @@ func New(t Transport) *API { PutRole: newSecurityPutRoleFunc(t), PutUser: newSecurityPutUserFunc(t), QueryAPIKeys: newSecurityQueryAPIKeysFunc(t), + QueryUser: newSecurityQueryUserFunc(t), SamlAuthenticate: newSecuritySamlAuthenticateFunc(t), SamlCompleteLogout: newSecuritySamlCompleteLogoutFunc(t), SamlInvalidate: newSecuritySamlInvalidateFunc(t), diff --git a/esapi/api.bulk.go b/esapi/api.bulk.go index 5343edff95..8952d23a64 100644 --- a/esapi/api.bulk.go +++ 
b/esapi/api.bulk.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -60,6 +60,7 @@ type BulkRequest struct { Pipeline string Refresh string RequireAlias *bool + RequireDataStream *bool Routing string Source []string SourceExcludes []string @@ -129,6 +130,10 @@ func (r BulkRequest) Do(providedCtx context.Context, transport Transport) (*Resp params["require_alias"] = strconv.FormatBool(*r.RequireAlias) } + if r.RequireDataStream != nil { + params["require_data_stream"] = strconv.FormatBool(*r.RequireDataStream) + } + if r.Routing != "" { params["routing"] = r.Routing } @@ -277,6 +282,13 @@ func (f Bulk) WithRequireAlias(v bool) func(*BulkRequest) { } } +// WithRequireDataStream - when true, requires the destination to be a data stream (existing or to-be-created). default is false. +func (f Bulk) WithRequireDataStream(v bool) func(*BulkRequest) { + return func(r *BulkRequest) { + r.RequireDataStream = &v + } +} + // WithRouting - specific routing value. func (f Bulk) WithRouting(v string) func(*BulkRequest) { return func(r *BulkRequest) { diff --git a/esapi/api.cat.aliases.go b/esapi/api.cat.aliases.go index 1c1d79dd7d..28f7993052 100644 --- a/esapi/api.cat.aliases.go +++ b/esapi/api.cat.aliases.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.allocation.go b/esapi/api.cat.allocation.go index 4367144700..1e7221ab3a 100644 --- a/esapi/api.cat.allocation.go +++ b/esapi/api.cat.allocation.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.component_templates.go b/esapi/api.cat.component_templates.go index d305d99ab9..9cdbbed830 100644 --- a/esapi/api.cat.component_templates.go +++ b/esapi/api.cat.component_templates.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.count.go b/esapi/api.cat.count.go index 1a228dc7c2..f96d47edb3 100644 --- a/esapi/api.cat.count.go +++ b/esapi/api.cat.count.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.fielddata.go b/esapi/api.cat.fielddata.go index 60c570915d..3dc35dcecb 100644 --- a/esapi/api.cat.fielddata.go +++ b/esapi/api.cat.fielddata.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
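A minimal usage sketch for the new RequireDataStream option added to BulkRequest above: with require_data_stream=true the server rejects the write unless the target is, or will be created as, a data stream. This assumes a default client and a hypothetical data stream named logs-myapp-default; only the WithRequireDataStream helper itself comes from this diff.

package main

import (
	"context"
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Bulk payload: "create" actions are required when writing to a data stream,
	// and the body must end with a newline.
	payload := strings.NewReader(`{"create":{}}
{"@timestamp":"2024-05-01T00:00:00Z","message":"hello"}
{"create":{}}
{"@timestamp":"2024-05-01T00:00:01Z","message":"world"}
`)

	res, err := es.Bulk(
		payload,
		es.Bulk.WithIndex("logs-myapp-default"), // hypothetical data stream name
		es.Bulk.WithRequireDataStream(true),     // new in 8.14: fail unless the target is (or becomes) a data stream
		es.Bulk.WithContext(context.Background()),
	)
	if err != nil {
		log.Fatalf("bulk request failed: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}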
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.health.go b/esapi/api.cat.health.go index e4cae77dad..79ade9baac 100644 --- a/esapi/api.cat.health.go +++ b/esapi/api.cat.health.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.help.go b/esapi/api.cat.help.go index ba7721c50a..e825ddbc02 100644 --- a/esapi/api.cat.help.go +++ b/esapi/api.cat.help.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.indices.go b/esapi/api.cat.indices.go index ec1c1cf13f..e8afd12a73 100644 --- a/esapi/api.cat.indices.go +++ b/esapi/api.cat.indices.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.master.go b/esapi/api.cat.master.go index 743ccbc9fe..5fffb05799 100644 --- a/esapi/api.cat.master.go +++ b/esapi/api.cat.master.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.nodeattrs.go b/esapi/api.cat.nodeattrs.go index 086b74fa5c..87b4f484fa 100644 --- a/esapi/api.cat.nodeattrs.go +++ b/esapi/api.cat.nodeattrs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.nodes.go b/esapi/api.cat.nodes.go index aa7bc16312..6754491e78 100644 --- a/esapi/api.cat.nodes.go +++ b/esapi/api.cat.nodes.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.pending_tasks.go b/esapi/api.cat.pending_tasks.go index eb41a62d3a..4f971bc65f 100644 --- a/esapi/api.cat.pending_tasks.go +++ b/esapi/api.cat.pending_tasks.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.plugins.go b/esapi/api.cat.plugins.go index 14dede3ea4..65db3f794b 100644 --- a/esapi/api.cat.plugins.go +++ b/esapi/api.cat.plugins.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.recovery.go b/esapi/api.cat.recovery.go index 0c92488c23..d1b75a53bd 100644 --- a/esapi/api.cat.recovery.go +++ b/esapi/api.cat.recovery.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.repositories.go b/esapi/api.cat.repositories.go index 63a33ad031..b9234bb53b 100644 --- a/esapi/api.cat.repositories.go +++ b/esapi/api.cat.repositories.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.segments.go b/esapi/api.cat.segments.go index d86bc0ff2a..892d38cbc1 100644 --- a/esapi/api.cat.segments.go +++ b/esapi/api.cat.segments.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.shards.go b/esapi/api.cat.shards.go index befb56bdf9..bcb270828f 100644 --- a/esapi/api.cat.shards.go +++ b/esapi/api.cat.shards.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.snapshots.go b/esapi/api.cat.snapshots.go index 514933b446..c572a982b1 100644 --- a/esapi/api.cat.snapshots.go +++ b/esapi/api.cat.snapshots.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.tasks.go b/esapi/api.cat.tasks.go index 2fb0798347..8676cf9078 100644 --- a/esapi/api.cat.tasks.go +++ b/esapi/api.cat.tasks.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.templates.go b/esapi/api.cat.templates.go index 8e9574e90c..e967175a15 100644 --- a/esapi/api.cat.templates.go +++ b/esapi/api.cat.templates.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.thread_pool.go b/esapi/api.cat.thread_pool.go index 91021e0f00..816f6c5a0c 100644 --- a/esapi/api.cat.thread_pool.go +++ b/esapi/api.cat.thread_pool.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.clear_scroll.go b/esapi/api.clear_scroll.go index d3d299aa5e..e4331102bc 100644 --- a/esapi/api.clear_scroll.go +++ b/esapi/api.clear_scroll.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.allocation_explain.go b/esapi/api.cluster.allocation_explain.go index 2cf86b3ef7..49dcd6d60f 100644 --- a/esapi/api.cluster.allocation_explain.go +++ b/esapi/api.cluster.allocation_explain.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.delete_component_template.go b/esapi/api.cluster.delete_component_template.go index 9bfb309437..3f58f06d8d 100644 --- a/esapi/api.cluster.delete_component_template.go +++ b/esapi/api.cluster.delete_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.delete_voting_config_exclusions.go b/esapi/api.cluster.delete_voting_config_exclusions.go index be6a72d809..a8357ac7f7 100644 --- a/esapi/api.cluster.delete_voting_config_exclusions.go +++ b/esapi/api.cluster.delete_voting_config_exclusions.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.exists_component_template.go b/esapi/api.cluster.exists_component_template.go index 46ff5dca96..4089fde39d 100644 --- a/esapi/api.cluster.exists_component_template.go +++ b/esapi/api.cluster.exists_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.get_component_template.go b/esapi/api.cluster.get_component_template.go index 0a14eaefef..77071d58ed 100644 --- a/esapi/api.cluster.get_component_template.go +++ b/esapi/api.cluster.get_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.get_settings.go b/esapi/api.cluster.get_settings.go index f944e81297..fd6278d54b 100644 --- a/esapi/api.cluster.get_settings.go +++ b/esapi/api.cluster.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.health.go b/esapi/api.cluster.health.go index 3833b78b19..744348326c 100644 --- a/esapi/api.cluster.health.go +++ b/esapi/api.cluster.health.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.info.go b/esapi/api.cluster.info.go index a2fc94e308..6ada054f37 100644 --- a/esapi/api.cluster.info.go +++ b/esapi/api.cluster.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.pending_tasks.go b/esapi/api.cluster.pending_tasks.go index 9d4aa3ef82..58d9db2a39 100644 --- a/esapi/api.cluster.pending_tasks.go +++ b/esapi/api.cluster.pending_tasks.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.post_voting_config_exclusions.go b/esapi/api.cluster.post_voting_config_exclusions.go index 8cae2ff3d0..6bed21496c 100644 --- a/esapi/api.cluster.post_voting_config_exclusions.go +++ b/esapi/api.cluster.post_voting_config_exclusions.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.put_component_template.go b/esapi/api.cluster.put_component_template.go index 0f400bc336..6a2f38114a 100644 --- a/esapi/api.cluster.put_component_template.go +++ b/esapi/api.cluster.put_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.put_settings.go b/esapi/api.cluster.put_settings.go index e83014f724..f70f2a7b92 100644 --- a/esapi/api.cluster.put_settings.go +++ b/esapi/api.cluster.put_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.remote_info.go b/esapi/api.cluster.remote_info.go index 26e40816cc..9c30bd4ab4 100644 --- a/esapi/api.cluster.remote_info.go +++ b/esapi/api.cluster.remote_info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.reroute.go b/esapi/api.cluster.reroute.go index ba59117f4a..c8510e7719 100644 --- a/esapi/api.cluster.reroute.go +++ b/esapi/api.cluster.reroute.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.state.go b/esapi/api.cluster.state.go index 52de2fcbe1..1926dd30d5 100644 --- a/esapi/api.cluster.state.go +++ b/esapi/api.cluster.state.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.stats.go b/esapi/api.cluster.stats.go index 16f23731b0..335cb77003 100644 --- a/esapi/api.cluster.stats.go +++ b/esapi/api.cluster.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.check_in.go b/esapi/api.connector.check_in.go index 0c7cc94111..7d0f66a11a 100644 --- a/esapi/api.connector.check_in.go +++ b/esapi/api.connector.check_in.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.delete.go b/esapi/api.connector.delete.go index 0081d1dc55..df9f221f9e 100644 --- a/esapi/api.connector.delete.go +++ b/esapi/api.connector.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.get.go b/esapi/api.connector.get.go index 231f0db33e..6e27967228 100644 --- a/esapi/api.connector.get.go +++ b/esapi/api.connector.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.last_sync.go b/esapi/api.connector.last_sync.go index 340a9fed1b..9b39d5b153 100644 --- a/esapi/api.connector.last_sync.go +++ b/esapi/api.connector.last_sync.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.list.go b/esapi/api.connector.list.go index bc8b0c9396..19cc2c8ca9 100644 --- a/esapi/api.connector.list.go +++ b/esapi/api.connector.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -52,8 +52,12 @@ type ConnectorList func(o ...func(*ConnectorListRequest)) (*Response, error) // ConnectorListRequest configures the Connector List API request. type ConnectorListRequest struct { - From *int - Size *int + ConnectorName []string + From *int + IndexName []string + Query string + ServiceType []string + Size *int Pretty bool Human bool @@ -92,10 +96,26 @@ func (r ConnectorListRequest) Do(providedCtx context.Context, transport Transpor params = make(map[string]string) + if len(r.ConnectorName) > 0 { + params["connector_name"] = strings.Join(r.ConnectorName, ",") + } + if r.From != nil { params["from"] = strconv.FormatInt(int64(*r.From), 10) } + if len(r.IndexName) > 0 { + params["index_name"] = strings.Join(r.IndexName, ",") + } + + if r.Query != "" { + params["query"] = r.Query + } + + if len(r.ServiceType) > 0 { + params["service_type"] = strings.Join(r.ServiceType, ",") + } + if r.Size != nil { params["size"] = strconv.FormatInt(int64(*r.Size), 10) } @@ -178,6 +198,13 @@ func (f ConnectorList) WithContext(v context.Context) func(*ConnectorListRequest } } +// WithConnectorName - a list of connector names to fetch connector documents for. +func (f ConnectorList) WithConnectorName(v ...string) func(*ConnectorListRequest) { + return func(r *ConnectorListRequest) { + r.ConnectorName = v + } +} + // WithFrom - starting offset (default: 0). func (f ConnectorList) WithFrom(v int) func(*ConnectorListRequest) { return func(r *ConnectorListRequest) { @@ -185,6 +212,27 @@ func (f ConnectorList) WithFrom(v int) func(*ConnectorListRequest) { } } +// WithIndexName - a list of connector index names to fetch connector documents for. +func (f ConnectorList) WithIndexName(v ...string) func(*ConnectorListRequest) { + return func(r *ConnectorListRequest) { + r.IndexName = v + } +} + +// WithQuery - a search string for querying connectors, filtering results by matching against connector names, descriptions, and index names. +func (f ConnectorList) WithQuery(v string) func(*ConnectorListRequest) { + return func(r *ConnectorListRequest) { + r.Query = v + } +} + +// WithServiceType - a list of connector service types to fetch connector documents for. +func (f ConnectorList) WithServiceType(v ...string) func(*ConnectorListRequest) { + return func(r *ConnectorListRequest) { + r.ServiceType = v + } +} + // WithSize - specifies a max number of results to get (default: 100). func (f ConnectorList) WithSize(v int) func(*ConnectorListRequest) { return func(r *ConnectorListRequest) { diff --git a/esapi/api.connector.post.go b/esapi/api.connector.post.go index 039ea9fe37..e7db223c61 100644 --- a/esapi/api.connector.post.go +++ b/esapi/api.connector.post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.put.go b/esapi/api.connector.put.go index 51b4155662..3951115c42 100644 --- a/esapi/api.connector.put.go +++ b/esapi/api.connector.put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
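The ConnectorList changes above add connector_name, index_name, query, and service_type filters to the list endpoint. A sketch of combining them, assuming the same client setup and imports as the bulk example (plus "fmt"); the service types, index name, and query string are illustrative values, not part of this diff.

func listDriveConnectors(es *elasticsearch.Client) error {
	res, err := es.ConnectorList(
		es.ConnectorList.WithServiceType("google_drive", "sharepoint_online"), // illustrative service types
		es.ConnectorList.WithIndexName("search-google-drive"),                 // illustrative attached index
		es.ConnectorList.WithQuery("drive"),                                   // matches connector names, descriptions, index names
		es.ConnectorList.WithSize(20),
		es.ConnectorList.WithContext(context.Background()),
	)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("connector list: %s", res.String())
	}
	fmt.Println(res.String())
	return nil
}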
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_api_key_id.go b/esapi/api.connector.update_api_key_id.go new file mode 100644 index 0000000000..eb9418703e --- /dev/null +++ b/esapi/api.connector.update_api_key_id.go @@ -0,0 +1,237 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateAPIKeyDocumentIDFunc(t Transport) ConnectorUpdateAPIKeyDocumentID { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateAPIKeyDocumentIDRequest)) (*Response, error) { + var r = ConnectorUpdateAPIKeyDocumentIDRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateAPIKeyDocumentID updates the API key id and/or API key secret id fields in the connector document. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-api-key-id-api.html. +type ConnectorUpdateAPIKeyDocumentID func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateAPIKeyDocumentIDRequest)) (*Response, error) + +// ConnectorUpdateAPIKeyDocumentIDRequest configures the Connector UpdateAPI Key DocumentI D API request. +type ConnectorUpdateAPIKeyDocumentIDRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r ConnectorUpdateAPIKeyDocumentIDRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_api_key_id") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_api_key_id")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_api_key_id") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_api_key_id") + if reader := instrument.RecordRequestBody(ctx, "connector.update_api_key_id", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_api_key_id") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateAPIKeyDocumentID) WithContext(v context.Context) func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateAPIKeyDocumentID) WithPretty() func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateAPIKeyDocumentID) WithHuman() func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. 
+func (f ConnectorUpdateAPIKeyDocumentID) WithErrorTrace() func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorUpdateAPIKeyDocumentID) WithFilterPath(v ...string) func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateAPIKeyDocumentID) WithHeader(h map[string]string) func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateAPIKeyDocumentID) WithOpaqueID(s string) func(*ConnectorUpdateAPIKeyDocumentIDRequest) { + return func(r *ConnectorUpdateAPIKeyDocumentIDRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_configuration.go b/esapi/api.connector.update_configuration.go index 9082be76fe..a3ee023fbf 100644 --- a/esapi/api.connector.update_configuration.go +++ b/esapi/api.connector.update_configuration.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_error.go b/esapi/api.connector.update_error.go index 0a078a6b42..a37abbdaa8 100644 --- a/esapi/api.connector.update_error.go +++ b/esapi/api.connector.update_error.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_filtering.go b/esapi/api.connector.update_filtering.go index ef4014f39c..e8074202d9 100644 --- a/esapi/api.connector.update_filtering.go +++ b/esapi/api.connector.update_filtering.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_index_name.go b/esapi/api.connector.update_index_name.go new file mode 100644 index 0000000000..7855c3eb78 --- /dev/null +++ b/esapi/api.connector.update_index_name.go @@ -0,0 +1,237 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateIndexNameFunc(t Transport) ConnectorUpdateIndexName { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateIndexNameRequest)) (*Response, error) { + var r = ConnectorUpdateIndexNameRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateIndexName updates the index name of the connector. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-index-name-api.html. +type ConnectorUpdateIndexName func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateIndexNameRequest)) (*Response, error) + +// ConnectorUpdateIndexNameRequest configures the Connector Update Index Name API request. +type ConnectorUpdateIndexNameRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorUpdateIndexNameRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_index_name") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_index_name")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_index_name") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_index_name") + if reader := 
instrument.RecordRequestBody(ctx, "connector.update_index_name", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_index_name") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateIndexName) WithContext(v context.Context) func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateIndexName) WithPretty() func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateIndexName) WithHuman() func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateIndexName) WithErrorTrace() func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorUpdateIndexName) WithFilterPath(v ...string) func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateIndexName) WithHeader(h map[string]string) func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateIndexName) WithOpaqueID(s string) func(*ConnectorUpdateIndexNameRequest) { + return func(r *ConnectorUpdateIndexNameRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_name.go b/esapi/api.connector.update_name.go index 0ba9e136e2..01c770ff03 100644 --- a/esapi/api.connector.update_name.go +++ b/esapi/api.connector.update_name.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_native.go b/esapi/api.connector.update_native.go new file mode 100644 index 0000000000..914024c950 --- /dev/null +++ b/esapi/api.connector.update_native.go @@ -0,0 +1,237 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
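The two new partial-update endpoints above, connector.update_api_key_id and connector.update_index_name, share the same calling convention: a small JSON body plus the connector ID. A sketch for the index name variant, assuming the same client and imports as the earlier examples, a placeholder connector ID, and the body shape described in the update-connector-index-name documentation:

func updateConnectorIndexName(es *elasticsearch.Client) error {
	// Assumed body shape per the update-connector-index-name API docs.
	body := strings.NewReader(`{"index_name":"search-google-drive"}`)

	res, err := es.ConnectorUpdateIndexName(body, "my-connector-id", // placeholder connector ID
		es.ConnectorUpdateIndexName.WithContext(context.Background()),
	)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("update index name: %s", res.String())
	}
	return nil
}

ConnectorUpdateAPIKeyDocumentID takes the same shape of call, with a body along the lines of {"api_key_id": "...", "api_key_secret_id": "..."}.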
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateNativeFunc(t Transport) ConnectorUpdateNative { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateNativeRequest)) (*Response, error) { + var r = ConnectorUpdateNativeRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateNative updates the is_native flag of the connector. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/connector-apis.html. +type ConnectorUpdateNative func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateNativeRequest)) (*Response, error) + +// ConnectorUpdateNativeRequest configures the Connector Update Native API request. +type ConnectorUpdateNativeRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r ConnectorUpdateNativeRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_native") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_native")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_native") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_native") + if reader := instrument.RecordRequestBody(ctx, "connector.update_native", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_native") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateNative) WithContext(v context.Context) func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateNative) WithPretty() func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateNative) WithHuman() func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateNative) WithErrorTrace() func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. 
+func (f ConnectorUpdateNative) WithFilterPath(v ...string) func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateNative) WithHeader(h map[string]string) func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateNative) WithOpaqueID(s string) func(*ConnectorUpdateNativeRequest) { + return func(r *ConnectorUpdateNativeRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_pipeline.go b/esapi/api.connector.update_pipeline.go index b165a19137..8c565a8a14 100644 --- a/esapi/api.connector.update_pipeline.go +++ b/esapi/api.connector.update_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_scheduling.go b/esapi/api.connector.update_scheduling.go index 90dcc27150..dba1f573f1 100644 --- a/esapi/api.connector.update_scheduling.go +++ b/esapi/api.connector.update_scheduling.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_service_type.go b/esapi/api.connector.update_service_type.go new file mode 100644 index 0000000000..d0d0abc2d1 --- /dev/null +++ b/esapi/api.connector.update_service_type.go @@ -0,0 +1,237 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
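ConnectorUpdateNative, and the _service_type and _status endpoints that follow it in this diff, are invoked the same way as the other connector update routes: a JSON body plus the connector ID. A sketch for the is_native flag, with the body shape assumed from the connector API documentation and a placeholder connector ID; the same imports as the earlier examples apply.

func markConnectorNative(es *elasticsearch.Client, native bool) error {
	// Assumed body shape per the connector API docs.
	body := strings.NewReader(fmt.Sprintf(`{"is_native":%t}`, native))

	res, err := es.ConnectorUpdateNative(body, "my-connector-id") // placeholder connector ID
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("update native flag: %s", res.String())
	}
	return nil
}

ConnectorUpdateServiceDocumentType and ConnectorUpdateStatus appear to take bodies along the lines of {"service_type": "..."} and {"status": "..."} respectively, following the same pattern.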
+// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateServiceDocumentTypeFunc(t Transport) ConnectorUpdateServiceDocumentType { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateServiceDocumentTypeRequest)) (*Response, error) { + var r = ConnectorUpdateServiceDocumentTypeRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateServiceDocumentType updates the service type of the connector. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-service-type-api.html. +type ConnectorUpdateServiceDocumentType func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateServiceDocumentTypeRequest)) (*Response, error) + +// ConnectorUpdateServiceDocumentTypeRequest configures the Connector Update Service Document Type API request. +type ConnectorUpdateServiceDocumentTypeRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorUpdateServiceDocumentTypeRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_service_type") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_service_type")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_service_type") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_service_type") + if reader 
:= instrument.RecordRequestBody(ctx, "connector.update_service_type", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_service_type") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateServiceDocumentType) WithContext(v context.Context) func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateServiceDocumentType) WithPretty() func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateServiceDocumentType) WithHuman() func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateServiceDocumentType) WithErrorTrace() func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorUpdateServiceDocumentType) WithFilterPath(v ...string) func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateServiceDocumentType) WithHeader(h map[string]string) func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateServiceDocumentType) WithOpaqueID(s string) func(*ConnectorUpdateServiceDocumentTypeRequest) { + return func(r *ConnectorUpdateServiceDocumentTypeRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_status.go b/esapi/api.connector.update_status.go new file mode 100644 index 0000000000..deb3fe0837 --- /dev/null +++ b/esapi/api.connector.update_status.go @@ -0,0 +1,237 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
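A hedged usage sketch for the new connector.update_service_type endpoint defined above. The (body, connector_id) call shape comes from the generated signature; the JSON body layout and the example service type value are assumptions, not taken from this patch:

package main

import (
	"fmt"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func setConnectorServiceType(es *elasticsearch.Client, connectorID string) error {
	// Assumed body shape; see the update-connector-service-type reference page.
	body := strings.NewReader(`{"service_type": "sharepoint_online"}`)

	res, err := es.ConnectorUpdateServiceDocumentType(body, connectorID)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("connector.update_service_type failed: %s", res.String())
	}
	return nil
}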
See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateStatusFunc(t Transport) ConnectorUpdateStatus { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateStatusRequest)) (*Response, error) { + var r = ConnectorUpdateStatusRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateStatus updates the status of the connector. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-status-api.html. +type ConnectorUpdateStatus func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateStatusRequest)) (*Response, error) + +// ConnectorUpdateStatusRequest configures the Connector Update Status API request. +type ConnectorUpdateStatusRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorUpdateStatusRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_status") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_status")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_status") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_status") + if reader := instrument.RecordRequestBody(ctx, "connector.update_status", r.Body); 
reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_status") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateStatus) WithContext(v context.Context) func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateStatus) WithPretty() func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateStatus) WithHuman() func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateStatus) WithErrorTrace() func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorUpdateStatus) WithFilterPath(v ...string) func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateStatus) WithHeader(h map[string]string) func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateStatus) WithOpaqueID(s string) func(*ConnectorUpdateStatusRequest) { + return func(r *ConnectorUpdateStatusRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector_secret.delete.go b/esapi/api.connector_secret.delete.go new file mode 100644 index 0000000000..6f578e2e2e --- /dev/null +++ b/esapi/api.connector_secret.delete.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
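The connector.update_status endpoint above has the same shape. A minimal sketch, assuming the request body carries a single status field and that the example status value is one the server accepts (both assumptions); WithErrorTrace is one of the generated options from this file:

package main

import (
	"fmt"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func setConnectorStatus(es *elasticsearch.Client, connectorID string) error {
	res, err := es.ConnectorUpdateStatus(
		strings.NewReader(`{"status": "needs_configuration"}`), // assumed body and value
		connectorID,
		es.ConnectorUpdateStatus.WithErrorTrace(), // include a server-side stack trace on failure
	)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("connector.update_status failed: %s", res.String())
	}
	return nil
}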
+// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newConnectorSecretDeleteFunc(t Transport) ConnectorSecretDelete { + return func(id string, o ...func(*ConnectorSecretDeleteRequest)) (*Response, error) { + var r = ConnectorSecretDeleteRequest{DocumentID: id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorSecretDelete deletes a connector secret. +// +// This API is experimental. +type ConnectorSecretDelete func(id string, o ...func(*ConnectorSecretDeleteRequest)) (*Response, error) + +// ConnectorSecretDeleteRequest configures the Connector Secret Delete API request. +type ConnectorSecretDeleteRequest struct { + DocumentID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorSecretDeleteRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector_secret.delete") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "DELETE" + + path.Grow(7 + 1 + len("_connector") + 1 + len("_secret") + 1 + len(r.DocumentID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString("_secret") + path.WriteString("/") + path.WriteString(r.DocumentID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "id", r.DocumentID) + } + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector_secret.delete") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector_secret.delete") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. 
+func (f ConnectorSecretDelete) WithContext(v context.Context) func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorSecretDelete) WithPretty() func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorSecretDelete) WithHuman() func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorSecretDelete) WithErrorTrace() func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorSecretDelete) WithFilterPath(v ...string) func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorSecretDelete) WithHeader(h map[string]string) func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorSecretDelete) WithOpaqueID(s string) func(*ConnectorSecretDeleteRequest) { + return func(r *ConnectorSecretDeleteRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector_secret.get.go b/esapi/api.connector_secret.get.go new file mode 100644 index 0000000000..e9447f9826 --- /dev/null +++ b/esapi/api.connector_secret.get.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newConnectorSecretGetFunc(t Transport) ConnectorSecretGet { + return func(id string, o ...func(*ConnectorSecretGetRequest)) (*Response, error) { + var r = ConnectorSecretGetRequest{DocumentID: id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorSecretGet retrieves a secret stored by Connectors. +// +// This API is experimental. 
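A minimal sketch for the experimental connector_secret.delete endpoint above; the secret ID is a placeholder:

package main

import (
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func deleteConnectorSecret(es *elasticsearch.Client) {
	res, err := es.ConnectorSecretDelete("secret-id") // placeholder ID
	if err != nil {
		log.Fatalf("connector_secret.delete: %s", err)
	}
	defer res.Body.Close()
	if res.IsError() {
		log.Printf("delete failed: %s", res.Status())
	}
}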
+type ConnectorSecretGet func(id string, o ...func(*ConnectorSecretGetRequest)) (*Response, error) + +// ConnectorSecretGetRequest configures the Connector Secret Get API request. +type ConnectorSecretGetRequest struct { + DocumentID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorSecretGetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector_secret.get") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + path.Grow(7 + 1 + len("_connector") + 1 + len("_secret") + 1 + len(r.DocumentID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString("_secret") + path.WriteString("/") + path.WriteString(r.DocumentID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "id", r.DocumentID) + } + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector_secret.get") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector_secret.get") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorSecretGet) WithContext(v context.Context) func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorSecretGet) WithPretty() func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorSecretGet) WithHuman() func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. 
+func (f ConnectorSecretGet) WithErrorTrace() func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorSecretGet) WithFilterPath(v ...string) func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorSecretGet) WithHeader(h map[string]string) func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorSecretGet) WithOpaqueID(s string) func(*ConnectorSecretGetRequest) { + return func(r *ConnectorSecretGetRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector_secret.post.go b/esapi/api.connector_secret.post.go new file mode 100644 index 0000000000..f698358b44 --- /dev/null +++ b/esapi/api.connector_secret.post.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorSecretPostFunc(t Transport) ConnectorSecretPost { + return func(body io.Reader, o ...func(*ConnectorSecretPostRequest)) (*Response, error) { + var r = ConnectorSecretPostRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorSecretPost creates a secret for a Connector. +// +// This API is experimental. +type ConnectorSecretPost func(body io.Reader, o ...func(*ConnectorSecretPostRequest)) (*Response, error) + +// ConnectorSecretPostRequest configures the Connector Secret Post API request. +type ConnectorSecretPostRequest struct { + Body io.Reader + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
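Retrieving a stored secret with the connector_secret.get endpoint above. The sketch decodes into a generic map because the response document shape is not spelled out in this patch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

func getConnectorSecret(es *elasticsearch.Client, secretID string) (map[string]interface{}, error) {
	res, err := es.ConnectorSecretGet(secretID)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.IsError() {
		return nil, fmt.Errorf("connector_secret.get failed: %s", res.String())
	}

	var doc map[string]interface{}
	if err := json.NewDecoder(res.Body).Decode(&doc); err != nil {
		return nil, err
	}
	return doc, nil
}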
+func (r ConnectorSecretPostRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector_secret.post") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_connector/_secret")) + path.WriteString("http://") + path.WriteString("/_connector/_secret") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector_secret.post") + if reader := instrument.RecordRequestBody(ctx, "connector_secret.post", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector_secret.post") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorSecretPost) WithContext(v context.Context) func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorSecretPost) WithPretty() func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorSecretPost) WithHuman() func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorSecretPost) WithErrorTrace() func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorSecretPost) WithFilterPath(v ...string) func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. 
+func (f ConnectorSecretPost) WithHeader(h map[string]string) func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorSecretPost) WithOpaqueID(s string) func(*ConnectorSecretPostRequest) { + return func(r *ConnectorSecretPostRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector_secret.put.go b/esapi/api.connector_secret.put.go new file mode 100644 index 0000000000..912e3c3211 --- /dev/null +++ b/esapi/api.connector_secret.put.go @@ -0,0 +1,235 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorSecretPutFunc(t Transport) ConnectorSecretPut { + return func(id string, body io.Reader, o ...func(*ConnectorSecretPutRequest)) (*Response, error) { + var r = ConnectorSecretPutRequest{DocumentID: id, Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorSecretPut creates or updates a secret for a Connector. +// +// This API is experimental. +type ConnectorSecretPut func(id string, body io.Reader, o ...func(*ConnectorSecretPutRequest)) (*Response, error) + +// ConnectorSecretPutRequest configures the Connector Secret Put API request. +type ConnectorSecretPutRequest struct { + DocumentID string + + Body io.Reader + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
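Creating a secret with connector_secret.post above. The {"value": ...} body shape is an assumption; the server-generated secret ID would be read back from the response:

package main

import (
	"fmt"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func createConnectorSecret(es *elasticsearch.Client, secret string) error {
	body := strings.NewReader(fmt.Sprintf(`{"value": %q}`, secret)) // assumed body shape

	res, err := es.ConnectorSecretPost(body)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("connector_secret.post failed: %s", res.String())
	}
	return nil
}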
+func (r ConnectorSecretPutRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector_secret.put") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len("_secret") + 1 + len(r.DocumentID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString("_secret") + path.WriteString("/") + path.WriteString(r.DocumentID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "id", r.DocumentID) + } + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector_secret.put") + if reader := instrument.RecordRequestBody(ctx, "connector_secret.put", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector_secret.put") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorSecretPut) WithContext(v context.Context) func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorSecretPut) WithPretty() func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorSecretPut) WithHuman() func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorSecretPut) WithErrorTrace() func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. 
+func (f ConnectorSecretPut) WithFilterPath(v ...string) func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorSecretPut) WithHeader(h map[string]string) func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorSecretPut) WithOpaqueID(s string) func(*ConnectorSecretPutRequest) { + return func(r *ConnectorSecretPutRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector_sync_job.cancel.go b/esapi/api.connector_sync_job.cancel.go index f7a5e99bd2..c680135162 100644 --- a/esapi/api.connector_sync_job.cancel.go +++ b/esapi/api.connector_sync_job.cancel.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector_sync_job.check_in.go b/esapi/api.connector_sync_job.check_in.go index d8ebc7d6ae..335eec5128 100644 --- a/esapi/api.connector_sync_job.check_in.go +++ b/esapi/api.connector_sync_job.check_in.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector_sync_job.delete.go b/esapi/api.connector_sync_job.delete.go index 358f57ab20..0c29dd3645 100644 --- a/esapi/api.connector_sync_job.delete.go +++ b/esapi/api.connector_sync_job.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector_sync_job.error.go b/esapi/api.connector_sync_job.error.go index 5883f59d66..57e47f9e21 100644 --- a/esapi/api.connector_sync_job.error.go +++ b/esapi/api.connector_sync_job.error.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector_sync_job.get.go b/esapi/api.connector_sync_job.get.go index 668fd2a871..a05bd81170 100644 --- a/esapi/api.connector_sync_job.get.go +++ b/esapi/api.connector_sync_job.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector_sync_job.list.go b/esapi/api.connector_sync_job.list.go index fe4ddf627b..5097e1d8ac 100644 --- a/esapi/api.connector_sync_job.list.go +++ b/esapi/api.connector_sync_job.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
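connector_secret.put above takes a caller-chosen ID in addition to the body, unlike the POST variant. Again the body shape is an assumption:

package main

import (
	"fmt"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func putConnectorSecret(es *elasticsearch.Client, secretID, secret string) error {
	res, err := es.ConnectorSecretPut(
		secretID,
		strings.NewReader(fmt.Sprintf(`{"value": %q}`, secret)), // assumed body shape
	)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("connector_secret.put failed: %s", res.String())
	}
	return nil
}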
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -54,6 +54,7 @@ type ConnectorSyncJobList func(o ...func(*ConnectorSyncJobListRequest)) (*Respon type ConnectorSyncJobListRequest struct { ConnectorID string From *int + JobType []string Size *int Status string @@ -102,6 +103,10 @@ func (r ConnectorSyncJobListRequest) Do(providedCtx context.Context, transport T params["from"] = strconv.FormatInt(int64(*r.From), 10) } + if len(r.JobType) > 0 { + params["job_type"] = strings.Join(r.JobType, ",") + } + if r.Size != nil { params["size"] = strconv.FormatInt(int64(*r.Size), 10) } @@ -202,6 +207,13 @@ func (f ConnectorSyncJobList) WithFrom(v int) func(*ConnectorSyncJobListRequest) } } +// WithJobType - a list of job types. +func (f ConnectorSyncJobList) WithJobType(v ...string) func(*ConnectorSyncJobListRequest) { + return func(r *ConnectorSyncJobListRequest) { + r.JobType = v + } +} + // WithSize - specifies a max number of results to get (default: 100). func (f ConnectorSyncJobList) WithSize(v int) func(*ConnectorSyncJobListRequest) { return func(r *ConnectorSyncJobListRequest) { diff --git a/esapi/api.connector_sync_job.post.go b/esapi/api.connector_sync_job.post.go index e581587565..3eabd41cfa 100644 --- a/esapi/api.connector_sync_job.post.go +++ b/esapi/api.connector_sync_job.post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector_sync_job.update_stats.go b/esapi/api.connector_sync_job.update_stats.go index 609fe48566..63e00f266d 100644 --- a/esapi/api.connector_sync_job.update_stats.go +++ b/esapi/api.connector_sync_job.update_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.count.go b/esapi/api.count.go index 312e5dd262..c4fbbd349c 100644 --- a/esapi/api.count.go +++ b/esapi/api.count.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.create.go b/esapi/api.create.go index 4b811a0593..8520c24906 100644 --- a/esapi/api.create.go +++ b/esapi/api.create.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.dangling_indices.delete_dangling_index.go b/esapi/api.dangling_indices.delete_dangling_index.go index 01fb268552..73ae365a64 100644 --- a/esapi/api.dangling_indices.delete_dangling_index.go +++ b/esapi/api.dangling_indices.delete_dangling_index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
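The connector_sync_job.list change above adds a job_type query parameter, exposed through the new WithJobType option. A sketch that filters the listing by job type; the job type names used here are assumptions:

package main

import (
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func listSyncJobs(es *elasticsearch.Client) {
	res, err := es.ConnectorSyncJobList(
		es.ConnectorSyncJobList.WithJobType("full", "incremental"), // assumed job type names
		es.ConnectorSyncJobList.WithSize(20),
	)
	if err != nil {
		log.Fatalf("connector_sync_job.list: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}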
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.dangling_indices.import_dangling_index.go b/esapi/api.dangling_indices.import_dangling_index.go index c0a39de567..b3281b924f 100644 --- a/esapi/api.dangling_indices.import_dangling_index.go +++ b/esapi/api.dangling_indices.import_dangling_index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.dangling_indices.list_dangling_indices.go b/esapi/api.dangling_indices.list_dangling_indices.go index 768d11f424..381f183118 100644 --- a/esapi/api.dangling_indices.list_dangling_indices.go +++ b/esapi/api.dangling_indices.list_dangling_indices.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete.go b/esapi/api.delete.go index 1c1a185b90..afe2be5b9c 100644 --- a/esapi/api.delete.go +++ b/esapi/api.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete_by_query.go b/esapi/api.delete_by_query.go index 601dca9b83..7119786507 100644 --- a/esapi/api.delete_by_query.go +++ b/esapi/api.delete_by_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete_by_query_rethrottle.go b/esapi/api.delete_by_query_rethrottle.go index 71b9e79fb2..4e769033dc 100644 --- a/esapi/api.delete_by_query_rethrottle.go +++ b/esapi/api.delete_by_query_rethrottle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete_script.go b/esapi/api.delete_script.go index 5ef41c35d9..5959779247 100644 --- a/esapi/api.delete_script.go +++ b/esapi/api.delete_script.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.exists.go b/esapi/api.exists.go index 6137571d45..1473fba103 100644 --- a/esapi/api.exists.go +++ b/esapi/api.exists.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.exists_source.go b/esapi/api.exists_source.go index 274b180618..04486c912a 100644 --- a/esapi/api.exists_source.go +++ b/esapi/api.exists_source.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.explain.go b/esapi/api.explain.go index 716f9b5e4f..ebec6fce29 100644 --- a/esapi/api.explain.go +++ b/esapi/api.explain.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.features.get_features.go b/esapi/api.features.get_features.go index 21f5f8359e..955901cbd5 100644 --- a/esapi/api.features.get_features.go +++ b/esapi/api.features.get_features.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.features.reset_features.go b/esapi/api.features.reset_features.go index 6ca0610b0e..2f59a56acc 100644 --- a/esapi/api.features.reset_features.go +++ b/esapi/api.features.reset_features.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.field_caps.go b/esapi/api.field_caps.go index 4b00e26b09..8844d94f80 100644 --- a/esapi/api.field_caps.go +++ b/esapi/api.field_caps.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -55,13 +55,14 @@ type FieldCapsRequest struct { Body io.Reader - AllowNoIndices *bool - ExpandWildcards string - Fields []string - Filters []string - IgnoreUnavailable *bool - IncludeUnmapped *bool - Types []string + AllowNoIndices *bool + ExpandWildcards string + Fields []string + Filters []string + IgnoreUnavailable *bool + IncludeEmptyFields *bool + IncludeUnmapped *bool + Types []string Pretty bool Human bool @@ -128,6 +129,10 @@ func (r FieldCapsRequest) Do(providedCtx context.Context, transport Transport) ( params["ignore_unavailable"] = strconv.FormatBool(*r.IgnoreUnavailable) } + if r.IncludeEmptyFields != nil { + params["include_empty_fields"] = strconv.FormatBool(*r.IncludeEmptyFields) + } + if r.IncludeUnmapped != nil { params["include_unmapped"] = strconv.FormatBool(*r.IncludeUnmapped) } @@ -270,6 +275,13 @@ func (f FieldCaps) WithIgnoreUnavailable(v bool) func(*FieldCapsRequest) { } } +// WithIncludeEmptyFields - include empty fields in result. +func (f FieldCaps) WithIncludeEmptyFields(v bool) func(*FieldCapsRequest) { + return func(r *FieldCapsRequest) { + r.IncludeEmptyFields = &v + } +} + // WithIncludeUnmapped - indicates whether unmapped fields should be included in the response.. 
func (f FieldCaps) WithIncludeUnmapped(v bool) func(*FieldCapsRequest) { return func(r *FieldCapsRequest) { diff --git a/esapi/api.fleet.delete_secret.go b/esapi/api.fleet.delete_secret.go index d661c11225..ea600c05a2 100644 --- a/esapi/api.fleet.delete_secret.go +++ b/esapi/api.fleet.delete_secret.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.get_secret.go b/esapi/api.fleet.get_secret.go index 8c21b4ebd0..9dd92c0bfe 100644 --- a/esapi/api.fleet.get_secret.go +++ b/esapi/api.fleet.get_secret.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.global_checkpoints.go b/esapi/api.fleet.global_checkpoints.go index 859624fda9..5def5ced62 100644 --- a/esapi/api.fleet.global_checkpoints.go +++ b/esapi/api.fleet.global_checkpoints.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.msearch.go b/esapi/api.fleet.msearch.go index cb5934ecdd..8b6e0af20d 100644 --- a/esapi/api.fleet.msearch.go +++ b/esapi/api.fleet.msearch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.post_secret.go b/esapi/api.fleet.post_secret.go index c6a84d7479..318ec212cb 100644 --- a/esapi/api.fleet.post_secret.go +++ b/esapi/api.fleet.post_secret.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.search.go b/esapi/api.fleet.search.go index f6003d6441..d562518cd2 100644 --- a/esapi/api.fleet.search.go +++ b/esapi/api.fleet.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.get.go b/esapi/api.get.go index 27ef6acddc..639a90796b 100644 --- a/esapi/api.get.go +++ b/esapi/api.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_script.go b/esapi/api.get_script.go index 38e993f5e7..73c867e7ae 100644 --- a/esapi/api.get_script.go +++ b/esapi/api.get_script.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
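The field_caps change above adds an include_empty_fields parameter via WithIncludeEmptyFields. A sketch that drops empty fields from the capabilities listing; the index pattern is a placeholder, and WithIndex/WithFields are assumed to be the pre-existing options for the corresponding request fields:

package main

import (
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func fieldCapsWithoutEmptyFields(es *elasticsearch.Client) {
	res, err := es.FieldCaps(
		es.FieldCaps.WithIndex("logs-*"), // placeholder index pattern
		es.FieldCaps.WithFields("*"),
		es.FieldCaps.WithIncludeEmptyFields(false), // new in this patch
	)
	if err != nil {
		log.Fatalf("field_caps: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}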
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_script_context.go b/esapi/api.get_script_context.go index 1a3d776e18..d684c48ec0 100644 --- a/esapi/api.get_script_context.go +++ b/esapi/api.get_script_context.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_script_languages.go b/esapi/api.get_script_languages.go index aee9443a67..1ee8de6b2d 100644 --- a/esapi/api.get_script_languages.go +++ b/esapi/api.get_script_languages.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_source.go b/esapi/api.get_source.go index cdc5216eea..819e1bf4ce 100644 --- a/esapi/api.get_source.go +++ b/esapi/api.get_source.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.health_report.go b/esapi/api.health_report.go index c59a68b9d2..ecf7962a89 100644 --- a/esapi/api.health_report.go +++ b/esapi/api.health_report.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.index.go b/esapi/api.index.go index f4a8cd6922..346b77bf70 100644 --- a/esapi/api.index.go +++ b/esapi/api.index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -63,6 +63,7 @@ type IndexRequest struct { Pipeline string Refresh string RequireAlias *bool + RequireDataStream *bool Routing string Timeout time.Duration Version *int @@ -147,6 +148,10 @@ func (r IndexRequest) Do(providedCtx context.Context, transport Transport) (*Res params["require_alias"] = strconv.FormatBool(*r.RequireAlias) } + if r.RequireDataStream != nil { + params["require_data_stream"] = strconv.FormatBool(*r.RequireDataStream) + } + if r.Routing != "" { params["routing"] = r.Routing } @@ -301,6 +306,13 @@ func (f Index) WithRequireAlias(v bool) func(*IndexRequest) { } } +// WithRequireDataStream - when true, requires the destination to be a data stream (existing or to-be-created). default is false. +func (f Index) WithRequireDataStream(v bool) func(*IndexRequest) { + return func(r *IndexRequest) { + r.RequireDataStream = &v + } +} + // WithRouting - specific routing value. func (f Index) WithRouting(v string) func(*IndexRequest) { return func(r *IndexRequest) { diff --git a/esapi/api.indices.add_block.go b/esapi/api.indices.add_block.go index 1beca0374f..784364cbff 100644 --- a/esapi/api.indices.add_block.go +++ b/esapi/api.indices.add_block.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
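The index API change above adds require_data_stream (WithRequireDataStream), which makes the write fail unless the target resolves to a data stream. A sketch with a placeholder data stream name; WithOpType("create") is assumed here since data streams only accept create operations through the index API:

package main

import (
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func appendToDataStream(es *elasticsearch.Client) {
	res, err := es.Index(
		"logs-myapp-default", // placeholder data stream name
		strings.NewReader(`{"@timestamp": "2024-05-01T00:00:00Z", "message": "hello"}`),
		es.Index.WithOpType("create"),        // data streams only accept create operations
		es.Index.WithRequireDataStream(true), // new in this patch: refuse plain indices
	)
	if err != nil {
		log.Fatalf("index: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}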
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.analyze.go b/esapi/api.indices.analyze.go index af7283bf5b..19076d3115 100644 --- a/esapi/api.indices.analyze.go +++ b/esapi/api.indices.analyze.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.clear_cache.go b/esapi/api.indices.clear_cache.go index 4aae3a2eef..f16d9baa25 100644 --- a/esapi/api.indices.clear_cache.go +++ b/esapi/api.indices.clear_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.clone.go b/esapi/api.indices.clone.go index f7fb01caaf..7e8e39cad4 100644 --- a/esapi/api.indices.clone.go +++ b/esapi/api.indices.clone.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.close.go b/esapi/api.indices.close.go index 41371240cd..80c483f390 100644 --- a/esapi/api.indices.close.go +++ b/esapi/api.indices.close.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.create.go b/esapi/api.indices.create.go index 4b8bd3466a..abc47fec55 100644 --- a/esapi/api.indices.create.go +++ b/esapi/api.indices.create.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete.go b/esapi/api.indices.delete.go index 03595398cd..f7a6e28153 100644 --- a/esapi/api.indices.delete.go +++ b/esapi/api.indices.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_alias.go b/esapi/api.indices.delete_alias.go index 0d248d2d26..5261165c38 100644 --- a/esapi/api.indices.delete_alias.go +++ b/esapi/api.indices.delete_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_data_lifecycle.go b/esapi/api.indices.delete_data_lifecycle.go index a5703fc7ee..03056e3d8d 100644 --- a/esapi/api.indices.delete_data_lifecycle.go +++ b/esapi/api.indices.delete_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_index_template.go b/esapi/api.indices.delete_index_template.go index e6c8136bfa..66f7eaebcf 100644 --- a/esapi/api.indices.delete_index_template.go +++ b/esapi/api.indices.delete_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_template.go b/esapi/api.indices.delete_template.go index 46f41c0f48..8ce98d4f4d 100644 --- a/esapi/api.indices.delete_template.go +++ b/esapi/api.indices.delete_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.disk_usage.go b/esapi/api.indices.disk_usage.go index a1574fab92..5797aefa64 100644 --- a/esapi/api.indices.disk_usage.go +++ b/esapi/api.indices.disk_usage.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.downsample.go b/esapi/api.indices.downsample.go index c3c457289b..ab4027e7a2 100644 --- a/esapi/api.indices.downsample.go +++ b/esapi/api.indices.downsample.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists.go b/esapi/api.indices.exists.go index 1da6e6bece..1b4a9aefad 100644 --- a/esapi/api.indices.exists.go +++ b/esapi/api.indices.exists.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists_alias.go b/esapi/api.indices.exists_alias.go index 76639c412b..4687d76b1b 100644 --- a/esapi/api.indices.exists_alias.go +++ b/esapi/api.indices.exists_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists_index_template.go b/esapi/api.indices.exists_index_template.go index 36be706323..9648c333dd 100644 --- a/esapi/api.indices.exists_index_template.go +++ b/esapi/api.indices.exists_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists_template.go b/esapi/api.indices.exists_template.go index 1ca331e8de..bfb1b66ced 100644 --- a/esapi/api.indices.exists_template.go +++ b/esapi/api.indices.exists_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.explain_data_lifecycle.go b/esapi/api.indices.explain_data_lifecycle.go index 324175c4de..3db5673812 100644 --- a/esapi/api.indices.explain_data_lifecycle.go +++ b/esapi/api.indices.explain_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.field_usage_stats.go b/esapi/api.indices.field_usage_stats.go index fe0a823304..ebd34f8707 100644 --- a/esapi/api.indices.field_usage_stats.go +++ b/esapi/api.indices.field_usage_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.flush.go b/esapi/api.indices.flush.go index 963945097d..7035a694c3 100644 --- a/esapi/api.indices.flush.go +++ b/esapi/api.indices.flush.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.forcemerge.go b/esapi/api.indices.forcemerge.go index 392aa6f322..33a931ff87 100644 --- a/esapi/api.indices.forcemerge.go +++ b/esapi/api.indices.forcemerge.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get.go b/esapi/api.indices.get.go index 0a479f1a75..d3c18ccde2 100644 --- a/esapi/api.indices.get.go +++ b/esapi/api.indices.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_alias.go b/esapi/api.indices.get_alias.go index 744935869d..b5814c9fb5 100644 --- a/esapi/api.indices.get_alias.go +++ b/esapi/api.indices.get_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_data_lifecycle.go b/esapi/api.indices.get_data_lifecycle.go index e440c55a98..8210a8a697 100644 --- a/esapi/api.indices.get_data_lifecycle.go +++ b/esapi/api.indices.get_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_field_mapping.go b/esapi/api.indices.get_field_mapping.go index d78f81a059..140a89be9e 100644 --- a/esapi/api.indices.get_field_mapping.go +++ b/esapi/api.indices.get_field_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_index_template.go b/esapi/api.indices.get_index_template.go index 227fb7ccd5..57af125158 100644 --- a/esapi/api.indices.get_index_template.go +++ b/esapi/api.indices.get_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_mapping.go b/esapi/api.indices.get_mapping.go index f4ec0dd51c..9de47c3f1f 100644 --- a/esapi/api.indices.get_mapping.go +++ b/esapi/api.indices.get_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_settings.go b/esapi/api.indices.get_settings.go index dacc4a9224..7ee9135489 100644 --- a/esapi/api.indices.get_settings.go +++ b/esapi/api.indices.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_template.go b/esapi/api.indices.get_template.go index 85dc1a0813..f3642e04e8 100644 --- a/esapi/api.indices.get_template.go +++ b/esapi/api.indices.get_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.modify_data_stream.go b/esapi/api.indices.modify_data_stream.go index a1b872e3ed..8e3a5bdeb9 100644 --- a/esapi/api.indices.modify_data_stream.go +++ b/esapi/api.indices.modify_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.open.go b/esapi/api.indices.open.go index b3d7713acf..58880388f5 100644 --- a/esapi/api.indices.open.go +++ b/esapi/api.indices.open.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_alias.go b/esapi/api.indices.put_alias.go index ff8675f2e3..461d19f5c8 100644 --- a/esapi/api.indices.put_alias.go +++ b/esapi/api.indices.put_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_data_lifecycle.go b/esapi/api.indices.put_data_lifecycle.go index eaa3982fb1..efbe8b33c5 100644 --- a/esapi/api.indices.put_data_lifecycle.go +++ b/esapi/api.indices.put_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_index_template.go b/esapi/api.indices.put_index_template.go index 1f3ec6409f..2095f86639 100644 --- a/esapi/api.indices.put_index_template.go +++ b/esapi/api.indices.put_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_mapping.go b/esapi/api.indices.put_mapping.go index 6456028ea6..3caa804b0b 100644 --- a/esapi/api.indices.put_mapping.go +++ b/esapi/api.indices.put_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_settings.go b/esapi/api.indices.put_settings.go index c55b64d2e5..2f709fa3ac 100644 --- a/esapi/api.indices.put_settings.go +++ b/esapi/api.indices.put_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_template.go b/esapi/api.indices.put_template.go index c8d768c78f..7fe664efc1 100644 --- a/esapi/api.indices.put_template.go +++ b/esapi/api.indices.put_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.recovery.go b/esapi/api.indices.recovery.go index 35cef83a84..47c98c089f 100644 --- a/esapi/api.indices.recovery.go +++ b/esapi/api.indices.recovery.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.refresh.go b/esapi/api.indices.refresh.go index 32dd5d52e6..c4d0b52001 100644 --- a/esapi/api.indices.refresh.go +++ b/esapi/api.indices.refresh.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.resolve_cluster.go b/esapi/api.indices.resolve_cluster.go new file mode 100644 index 0000000000..865f508d50 --- /dev/null +++ b/esapi/api.indices.resolve_cluster.go @@ -0,0 +1,280 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "errors" + "net/http" + "strconv" + "strings" +) + +func newIndicesResolveClusterFunc(t Transport) IndicesResolveCluster { + return func(name []string, o ...func(*IndicesResolveClusterRequest)) (*Response, error) { + var r = IndicesResolveClusterRequest{Name: name} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// IndicesResolveCluster resolves the specified index expressions to return information about each cluster, including the local cluster, if included. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-cluster-api.html. +type IndicesResolveCluster func(name []string, o ...func(*IndicesResolveClusterRequest)) (*Response, error) + +// IndicesResolveClusterRequest configures the Indices Resolve Cluster API request. +type IndicesResolveClusterRequest struct { + Name []string + + AllowNoIndices *bool + ExpandWildcards string + IgnoreThrottled *bool + IgnoreUnavailable *bool + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r IndicesResolveClusterRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "indices.resolve_cluster") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + if len(r.Name) == 0 { + return nil, errors.New("name is required and cannot be nil or empty") + } + + path.Grow(7 + 1 + len("_resolve") + 1 + len("cluster") + 1 + len(strings.Join(r.Name, ","))) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_resolve") + path.WriteString("/") + path.WriteString("cluster") + path.WriteString("/") + path.WriteString(strings.Join(r.Name, ",")) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "name", strings.Join(r.Name, ",")) + } + + params = make(map[string]string) + + if r.AllowNoIndices != nil { + params["allow_no_indices"] = strconv.FormatBool(*r.AllowNoIndices) + } + + if r.ExpandWildcards != "" { + params["expand_wildcards"] = r.ExpandWildcards + } + + if r.IgnoreThrottled != nil { + params["ignore_throttled"] = strconv.FormatBool(*r.IgnoreThrottled) + } + + if r.IgnoreUnavailable != nil { + params["ignore_unavailable"] = strconv.FormatBool(*r.IgnoreUnavailable) + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "indices.resolve_cluster") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "indices.resolve_cluster") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f IndicesResolveCluster) WithContext(v context.Context) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.ctx = v + } +} + +// WithAllowNoIndices - whether to ignore if a wildcard indices expression resolves into no concrete indices. (this includes `_all` string or when no indices have been specified). +func (f IndicesResolveCluster) WithAllowNoIndices(v bool) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.AllowNoIndices = &v + } +} + +// WithExpandWildcards - whether wildcard expressions should get expanded to open or closed indices (default: open). 
+func (f IndicesResolveCluster) WithExpandWildcards(v string) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.ExpandWildcards = v + } +} + +// WithIgnoreThrottled - whether specified concrete, expanded or aliased indices should be ignored when throttled. +func (f IndicesResolveCluster) WithIgnoreThrottled(v bool) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.IgnoreThrottled = &v + } +} + +// WithIgnoreUnavailable - whether specified concrete indices should be ignored when unavailable (missing or closed). +func (f IndicesResolveCluster) WithIgnoreUnavailable(v bool) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.IgnoreUnavailable = &v + } +} + +// WithPretty makes the response body pretty-printed. +func (f IndicesResolveCluster) WithPretty() func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f IndicesResolveCluster) WithHuman() func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f IndicesResolveCluster) WithErrorTrace() func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f IndicesResolveCluster) WithFilterPath(v ...string) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f IndicesResolveCluster) WithHeader(h map[string]string) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f IndicesResolveCluster) WithOpaqueID(s string) func(*IndicesResolveClusterRequest) { + return func(r *IndicesResolveClusterRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.indices.resolve_index.go b/esapi/api.indices.resolve_index.go index c045ca42c7..b7b7cee33c 100644 --- a/esapi/api.indices.resolve_index.go +++ b/esapi/api.indices.resolve_index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.rollover.go b/esapi/api.indices.rollover.go index 13f1cafb16..b484ed9928 100644 --- a/esapi/api.indices.rollover.go +++ b/esapi/api.indices.rollover.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
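To put the new resolve-cluster endpoint in context, here is a minimal sketch of calling it via the request struct defined above; the local pattern logs-* and the remote cluster alias cluster_one are illustrative assumptions, not values taken from the diff:

package main

import (
	"context"
	"io"
	"log"
	"os"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("creating client: %s", err)
	}

	// Ask each matching cluster (local and the hypothetical remote "cluster_one")
	// whether the expression resolves there and whether the cluster is reachable.
	req := esapi.IndicesResolveClusterRequest{
		Name:            []string{"logs-*", "cluster_one:logs-*"},
		ExpandWildcards: "open",
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("resolve cluster: %s", err)
	}
	defer res.Body.Close()
	io.Copy(os.Stdout, res.Body)
}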
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -59,6 +59,7 @@ type IndicesRolloverRequest struct { NewIndex string DryRun *bool + Lazy *bool MasterTimeout time.Duration Timeout time.Duration WaitForActiveShards string @@ -117,6 +118,10 @@ func (r IndicesRolloverRequest) Do(providedCtx context.Context, transport Transp params["dry_run"] = strconv.FormatBool(*r.DryRun) } + if r.Lazy != nil { + params["lazy"] = strconv.FormatBool(*r.Lazy) + } + if r.MasterTimeout != 0 { params["master_timeout"] = formatDuration(r.MasterTimeout) } @@ -235,6 +240,13 @@ func (f IndicesRollover) WithDryRun(v bool) func(*IndicesRolloverRequest) { } } +// WithLazy - if set to true, the rollover action will only mark a data stream to signal that it needs to be rolled over at the next write. only allowed on data streams.. +func (f IndicesRollover) WithLazy(v bool) func(*IndicesRolloverRequest) { + return func(r *IndicesRolloverRequest) { + r.Lazy = &v + } +} + // WithMasterTimeout - specify timeout for connection to master. func (f IndicesRollover) WithMasterTimeout(v time.Duration) func(*IndicesRolloverRequest) { return func(r *IndicesRolloverRequest) { diff --git a/esapi/api.indices.segments.go b/esapi/api.indices.segments.go index d1af450e7b..7a4c641dc0 100644 --- a/esapi/api.indices.segments.go +++ b/esapi/api.indices.segments.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.shard_stores.go b/esapi/api.indices.shard_stores.go index 9f27ae9b6e..adf62b94c3 100644 --- a/esapi/api.indices.shard_stores.go +++ b/esapi/api.indices.shard_stores.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.shrink.go b/esapi/api.indices.shrink.go index 37a8e5249b..58140aaf18 100644 --- a/esapi/api.indices.shrink.go +++ b/esapi/api.indices.shrink.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.simulate_index_template.go b/esapi/api.indices.simulate_index_template.go index efc3a7882b..ed040b8fbf 100644 --- a/esapi/api.indices.simulate_index_template.go +++ b/esapi/api.indices.simulate_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.simulate_template.go b/esapi/api.indices.simulate_template.go index 50df5693a3..243eb758ac 100644 --- a/esapi/api.indices.simulate_template.go +++ b/esapi/api.indices.simulate_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
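The new Lazy flag on rollover defers the actual rollover until the next write to the data stream. A sketch of the request-struct style, assuming the existing Alias field of IndicesRolloverRequest names the rollover target and a hypothetical data stream logs-app-default:

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("creating client: %s", err)
	}

	lazy := true
	req := esapi.IndicesRolloverRequest{
		Alias: "logs-app-default", // hypothetical data stream; lazy rollover is only allowed on data streams
		Lazy:  &lazy,              // mark for rollover at the next write instead of rolling over immediately
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("rollover: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}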
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.split.go b/esapi/api.indices.split.go index 9c2b067e42..a64e10c286 100644 --- a/esapi/api.indices.split.go +++ b/esapi/api.indices.split.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.stats.go b/esapi/api.indices.stats.go index d8da8ae0d3..14db7ae4ff 100644 --- a/esapi/api.indices.stats.go +++ b/esapi/api.indices.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.update_aliases.go b/esapi/api.indices.update_aliases.go index 32a9a7629c..962b263b95 100644 --- a/esapi/api.indices.update_aliases.go +++ b/esapi/api.indices.update_aliases.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.validate_query.go b/esapi/api.indices.validate_query.go index c9532ae017..4515a90713 100644 --- a/esapi/api.indices.validate_query.go +++ b/esapi/api.indices.validate_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.inference.delete_model.go b/esapi/api.inference.delete_model.go index c6c3c8ec88..297d45d4e2 100644 --- a/esapi/api.inference.delete_model.go +++ b/esapi/api.inference.delete_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -26,8 +26,8 @@ import ( ) func newInferenceDeleteModelFunc(t Transport) InferenceDeleteModel { - return func(model_id string, task_type string, o ...func(*InferenceDeleteModelRequest)) (*Response, error) { - var r = InferenceDeleteModelRequest{ModelID: model_id, TaskType: task_type} + return func(inference_id string, o ...func(*InferenceDeleteModelRequest)) (*Response, error) { + var r = InferenceDeleteModelRequest{InferenceID: inference_id} for _, f := range o { f(&r) } @@ -47,12 +47,12 @@ func newInferenceDeleteModelFunc(t Transport) InferenceDeleteModel { // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html. -type InferenceDeleteModel func(model_id string, task_type string, o ...func(*InferenceDeleteModelRequest)) (*Response, error) +type InferenceDeleteModel func(inference_id string, o ...func(*InferenceDeleteModelRequest)) (*Response, error) // InferenceDeleteModelRequest configures the Inference Delete Model API request. 
type InferenceDeleteModelRequest struct { - ModelID string - TaskType string + InferenceID string + TaskType string Pretty bool Human bool @@ -85,19 +85,21 @@ func (r InferenceDeleteModelRequest) Do(providedCtx context.Context, transport T method = "DELETE" - path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.ModelID)) + path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.InferenceID)) path.WriteString("http://") path.WriteString("/") path.WriteString("_inference") - path.WriteString("/") - path.WriteString(r.TaskType) - if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "task_type", r.TaskType) + if r.TaskType != "" { + path.WriteString("/") + path.WriteString(r.TaskType) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "task_type", r.TaskType) + } } path.WriteString("/") - path.WriteString(r.ModelID) + path.WriteString(r.InferenceID) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "model_id", r.ModelID) + instrument.RecordPathPart(ctx, "inference_id", r.InferenceID) } params = make(map[string]string) @@ -180,6 +182,13 @@ func (f InferenceDeleteModel) WithContext(v context.Context) func(*InferenceDele } } +// WithTaskType - the task type. +func (f InferenceDeleteModel) WithTaskType(v string) func(*InferenceDeleteModelRequest) { + return func(r *InferenceDeleteModelRequest) { + r.TaskType = v + } +} + // WithPretty makes the response body pretty-printed. func (f InferenceDeleteModel) WithPretty() func(*InferenceDeleteModelRequest) { return func(r *InferenceDeleteModelRequest) { diff --git a/esapi/api.inference.get_model.go b/esapi/api.inference.get_model.go index be1590dcbc..b23765799a 100644 --- a/esapi/api.inference.get_model.go +++ b/esapi/api.inference.get_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -26,8 +26,8 @@ import ( ) func newInferenceGetModelFunc(t Transport) InferenceGetModel { - return func(model_id string, task_type string, o ...func(*InferenceGetModelRequest)) (*Response, error) { - var r = InferenceGetModelRequest{ModelID: model_id, TaskType: task_type} + return func(inference_id string, o ...func(*InferenceGetModelRequest)) (*Response, error) { + var r = InferenceGetModelRequest{InferenceID: inference_id} for _, f := range o { f(&r) } @@ -47,12 +47,12 @@ func newInferenceGetModelFunc(t Transport) InferenceGetModel { // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/get-inference-api.html. -type InferenceGetModel func(model_id string, task_type string, o ...func(*InferenceGetModelRequest)) (*Response, error) +type InferenceGetModel func(inference_id string, o ...func(*InferenceGetModelRequest)) (*Response, error) // InferenceGetModelRequest configures the Inference Get Model API request. 
type InferenceGetModelRequest struct { - ModelID string - TaskType string + InferenceID string + TaskType string Pretty bool Human bool @@ -85,19 +85,21 @@ func (r InferenceGetModelRequest) Do(providedCtx context.Context, transport Tran method = "GET" - path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.ModelID)) + path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.InferenceID)) path.WriteString("http://") path.WriteString("/") path.WriteString("_inference") - path.WriteString("/") - path.WriteString(r.TaskType) - if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "task_type", r.TaskType) + if r.TaskType != "" { + path.WriteString("/") + path.WriteString(r.TaskType) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "task_type", r.TaskType) + } } path.WriteString("/") - path.WriteString(r.ModelID) + path.WriteString(r.InferenceID) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "model_id", r.ModelID) + instrument.RecordPathPart(ctx, "inference_id", r.InferenceID) } params = make(map[string]string) @@ -180,6 +182,13 @@ func (f InferenceGetModel) WithContext(v context.Context) func(*InferenceGetMode } } +// WithTaskType - the task type. +func (f InferenceGetModel) WithTaskType(v string) func(*InferenceGetModelRequest) { + return func(r *InferenceGetModelRequest) { + r.TaskType = v + } +} + // WithPretty makes the response body pretty-printed. func (f InferenceGetModel) WithPretty() func(*InferenceGetModelRequest) { return func(r *InferenceGetModelRequest) { diff --git a/esapi/api.inference.inference.go b/esapi/api.inference.inference.go index 5b5748f661..6ab5f93139 100644 --- a/esapi/api.inference.inference.go +++ b/esapi/api.inference.inference.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -27,8 +27,8 @@ import ( ) func newInferenceInferenceFunc(t Transport) InferenceInference { - return func(model_id string, task_type string, o ...func(*InferenceInferenceRequest)) (*Response, error) { - var r = InferenceInferenceRequest{ModelID: model_id, TaskType: task_type} + return func(inference_id string, o ...func(*InferenceInferenceRequest)) (*Response, error) { + var r = InferenceInferenceRequest{InferenceID: inference_id} for _, f := range o { f(&r) } @@ -48,14 +48,14 @@ func newInferenceInferenceFunc(t Transport) InferenceInference { // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html. -type InferenceInference func(model_id string, task_type string, o ...func(*InferenceInferenceRequest)) (*Response, error) +type InferenceInference func(inference_id string, o ...func(*InferenceInferenceRequest)) (*Response, error) // InferenceInferenceRequest configures the Inference Inference API request. 
type InferenceInferenceRequest struct { Body io.Reader - ModelID string - TaskType string + InferenceID string + TaskType string Pretty bool Human bool @@ -88,19 +88,21 @@ func (r InferenceInferenceRequest) Do(providedCtx context.Context, transport Tra method = "POST" - path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.ModelID)) + path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.InferenceID)) path.WriteString("http://") path.WriteString("/") path.WriteString("_inference") - path.WriteString("/") - path.WriteString(r.TaskType) - if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "task_type", r.TaskType) + if r.TaskType != "" { + path.WriteString("/") + path.WriteString(r.TaskType) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "task_type", r.TaskType) + } } path.WriteString("/") - path.WriteString(r.ModelID) + path.WriteString(r.InferenceID) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "model_id", r.ModelID) + instrument.RecordPathPart(ctx, "inference_id", r.InferenceID) } params = make(map[string]string) @@ -197,6 +199,13 @@ func (f InferenceInference) WithBody(v io.Reader) func(*InferenceInferenceReques } } +// WithTaskType - the task type. +func (f InferenceInference) WithTaskType(v string) func(*InferenceInferenceRequest) { + return func(r *InferenceInferenceRequest) { + r.TaskType = v + } +} + // WithPretty makes the response body pretty-printed. func (f InferenceInference) WithPretty() func(*InferenceInferenceRequest) { return func(r *InferenceInferenceRequest) { diff --git a/esapi/api.inference.put_model.go b/esapi/api.inference.put_model.go index faa3c1c7aa..81de3264df 100644 --- a/esapi/api.inference.put_model.go +++ b/esapi/api.inference.put_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -27,8 +27,8 @@ import ( ) func newInferencePutModelFunc(t Transport) InferencePutModel { - return func(model_id string, task_type string, o ...func(*InferencePutModelRequest)) (*Response, error) { - var r = InferencePutModelRequest{ModelID: model_id, TaskType: task_type} + return func(inference_id string, o ...func(*InferencePutModelRequest)) (*Response, error) { + var r = InferencePutModelRequest{InferenceID: inference_id} for _, f := range o { f(&r) } @@ -48,14 +48,14 @@ func newInferencePutModelFunc(t Transport) InferencePutModel { // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/put-inference-api.html. -type InferencePutModel func(model_id string, task_type string, o ...func(*InferencePutModelRequest)) (*Response, error) +type InferencePutModel func(inference_id string, o ...func(*InferencePutModelRequest)) (*Response, error) // InferencePutModelRequest configures the Inference Put Model API request. 
type InferencePutModelRequest struct { Body io.Reader - ModelID string - TaskType string + InferenceID string + TaskType string Pretty bool Human bool @@ -88,19 +88,21 @@ func (r InferencePutModelRequest) Do(providedCtx context.Context, transport Tran method = "PUT" - path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.ModelID)) + path.Grow(7 + 1 + len("_inference") + 1 + len(r.TaskType) + 1 + len(r.InferenceID)) path.WriteString("http://") path.WriteString("/") path.WriteString("_inference") - path.WriteString("/") - path.WriteString(r.TaskType) - if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "task_type", r.TaskType) + if r.TaskType != "" { + path.WriteString("/") + path.WriteString(r.TaskType) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "task_type", r.TaskType) + } } path.WriteString("/") - path.WriteString(r.ModelID) + path.WriteString(r.InferenceID) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "model_id", r.ModelID) + instrument.RecordPathPart(ctx, "inference_id", r.InferenceID) } params = make(map[string]string) @@ -197,6 +199,13 @@ func (f InferencePutModel) WithBody(v io.Reader) func(*InferencePutModelRequest) } } +// WithTaskType - the task type. +func (f InferencePutModel) WithTaskType(v string) func(*InferencePutModelRequest) { + return func(r *InferencePutModelRequest) { + r.TaskType = v + } +} + // WithPretty makes the response body pretty-printed. func (f InferencePutModel) WithPretty() func(*InferencePutModelRequest) { return func(r *InferencePutModelRequest) { diff --git a/esapi/api.info.go b/esapi/api.info.go index 92a7f1cb6d..fe78275a6d 100644 --- a/esapi/api.info.go +++ b/esapi/api.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.delete_pipeline.go b/esapi/api.ingest.delete_pipeline.go index 770c39a889..e280a020ee 100644 --- a/esapi/api.ingest.delete_pipeline.go +++ b/esapi/api.ingest.delete_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.geo_ip_stats.go b/esapi/api.ingest.geo_ip_stats.go index dc2eef010e..db2943056a 100644 --- a/esapi/api.ingest.geo_ip_stats.go +++ b/esapi/api.ingest.geo_ip_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.get_pipeline.go b/esapi/api.ingest.get_pipeline.go index 82977b9db7..9bc9ab42d5 100644 --- a/esapi/api.ingest.get_pipeline.go +++ b/esapi/api.ingest.get_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
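Across the inference endpoints the required positional arguments change from (model_id, task_type) to a single inference_id, with the task type becoming an optional path segment. A minimal sketch using the request struct, where the endpoint ID my-elser-endpoint and task type sparse_embedding are hypothetical values:

package main

import (
	"context"
	"io"
	"log"
	"os"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("creating client: %s", err)
	}

	// Look up an inference endpoint by its ID alone; TaskType is now optional
	// and only narrows the path to /_inference/{task_type}/{inference_id}.
	req := esapi.InferenceGetModelRequest{
		InferenceID: "my-elser-endpoint", // hypothetical endpoint ID
		TaskType:    "sparse_embedding",  // optional; omit to match any task type
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("get inference endpoint: %s", err)
	}
	defer res.Body.Close()
	io.Copy(os.Stdout, res.Body)
}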
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.processor_grok.go b/esapi/api.ingest.processor_grok.go index a3ecf9ea9d..87e72b6b9e 100644 --- a/esapi/api.ingest.processor_grok.go +++ b/esapi/api.ingest.processor_grok.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.put_pipeline.go b/esapi/api.ingest.put_pipeline.go index e10755885f..e5ebe9791b 100644 --- a/esapi/api.ingest.put_pipeline.go +++ b/esapi/api.ingest.put_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.simulate.go b/esapi/api.ingest.simulate.go index 153a8b1fab..f5c393aa49 100644 --- a/esapi/api.ingest.simulate.go +++ b/esapi/api.ingest.simulate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.knn_search.go b/esapi/api.knn_search.go index 9b775510b8..8dce408276 100644 --- a/esapi/api.knn_search.go +++ b/esapi/api.knn_search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.mget.go b/esapi/api.mget.go index cf00011a55..acd493cd52 100644 --- a/esapi/api.mget.go +++ b/esapi/api.mget.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.msearch.go b/esapi/api.msearch.go index 5c3f1032cd..9a6292a020 100644 --- a/esapi/api.msearch.go +++ b/esapi/api.msearch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.msearch_template.go b/esapi/api.msearch_template.go index cf3dff32bf..ebc6118fcf 100644 --- a/esapi/api.msearch_template.go +++ b/esapi/api.msearch_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.mtermvectors.go b/esapi/api.mtermvectors.go index 6cb1109f99..e7f97eaefc 100644 --- a/esapi/api.mtermvectors.go +++ b/esapi/api.mtermvectors.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.clear_repositories_metering_archive.go b/esapi/api.nodes.clear_repositories_metering_archive.go index 72da992cbb..76c90ee7a4 100644 --- a/esapi/api.nodes.clear_repositories_metering_archive.go +++ b/esapi/api.nodes.clear_repositories_metering_archive.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.get_repositories_metering_info.go b/esapi/api.nodes.get_repositories_metering_info.go index 67a9f1a49b..1f5c8241a8 100644 --- a/esapi/api.nodes.get_repositories_metering_info.go +++ b/esapi/api.nodes.get_repositories_metering_info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.hot_threads.go b/esapi/api.nodes.hot_threads.go index 899a33d958..2c9e25730e 100644 --- a/esapi/api.nodes.hot_threads.go +++ b/esapi/api.nodes.hot_threads.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.info.go b/esapi/api.nodes.info.go index 5273b65bd8..6c8df20983 100644 --- a/esapi/api.nodes.info.go +++ b/esapi/api.nodes.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.reload_secure_settings.go b/esapi/api.nodes.reload_secure_settings.go index a38ed50799..903e2118f3 100644 --- a/esapi/api.nodes.reload_secure_settings.go +++ b/esapi/api.nodes.reload_secure_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.stats.go b/esapi/api.nodes.stats.go index 24ceb4e78e..ca8de33b73 100644 --- a/esapi/api.nodes.stats.go +++ b/esapi/api.nodes.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.usage.go b/esapi/api.nodes.usage.go index 0ca62442be..fb8a922cca 100644 --- a/esapi/api.nodes.usage.go +++ b/esapi/api.nodes.usage.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.ping.go b/esapi/api.ping.go index 291502feb1..5dbb0be23e 100644 --- a/esapi/api.ping.go +++ b/esapi/api.ping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.profiling.stacktraces.go b/esapi/api.profiling.stacktraces.go new file mode 100644 index 0000000000..73cce2bbd5 --- /dev/null +++ b/esapi/api.profiling.stacktraces.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newProfilingStacktracesFunc(t Transport) ProfilingStacktraces { + return func(body io.Reader, o ...func(*ProfilingStacktracesRequest)) (*Response, error) { + var r = ProfilingStacktracesRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ProfilingStacktraces extracts raw stacktrace information from Universal Profiling. +// +// See full documentation at https://www.elastic.co/guide/en/observability/current/universal-profiling.html. +type ProfilingStacktraces func(body io.Reader, o ...func(*ProfilingStacktracesRequest)) (*Response, error) + +// ProfilingStacktracesRequest configures the Profiling Stacktraces API request. +type ProfilingStacktracesRequest struct { + Body io.Reader + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r ProfilingStacktracesRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.stacktraces") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_profiling/stacktraces")) + path.WriteString("http://") + path.WriteString("/_profiling/stacktraces") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.stacktraces") + if reader := instrument.RecordRequestBody(ctx, "profiling.stacktraces", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.stacktraces") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ProfilingStacktraces) WithContext(v context.Context) func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ProfilingStacktraces) WithPretty() func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ProfilingStacktraces) WithHuman() func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ProfilingStacktraces) WithErrorTrace() func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ProfilingStacktraces) WithFilterPath(v ...string) func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. 
+func (f ProfilingStacktraces) WithHeader(h map[string]string) func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ProfilingStacktraces) WithOpaqueID(s string) func(*ProfilingStacktracesRequest) { + return func(r *ProfilingStacktracesRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.profiling.status.go b/esapi/api.profiling.status.go index d8530335c8..fdb2c60002 100644 --- a/esapi/api.profiling.status.go +++ b/esapi/api.profiling.status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.put_script.go b/esapi/api.put_script.go index a33a7f5d58..e986601720 100644 --- a/esapi/api.put_script.go +++ b/esapi/api.put_script.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.query_ruleset.delete.go b/esapi/api.query_ruleset.delete.go index eb94e933df..cd0b8315b2 100644 --- a/esapi/api.query_ruleset.delete.go +++ b/esapi/api.query_ruleset.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.query_ruleset.get.go b/esapi/api.query_ruleset.get.go index ec935d3a93..c14464d2f8 100644 --- a/esapi/api.query_ruleset.get.go +++ b/esapi/api.query_ruleset.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.query_ruleset.list.go b/esapi/api.query_ruleset.list.go index 71090969b8..b59e1a7b7a 100644 --- a/esapi/api.query_ruleset.list.go +++ b/esapi/api.query_ruleset.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.query_ruleset.put.go b/esapi/api.query_ruleset.put.go index 5c385d2a5a..2c010ec77e 100644 --- a/esapi/api.query_ruleset.put.go +++ b/esapi/api.query_ruleset.put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.rank_eval.go b/esapi/api.rank_eval.go index 0f616d6996..aa71fdd0bd 100644 --- a/esapi/api.rank_eval.go +++ b/esapi/api.rank_eval.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
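The new profiling.stacktraces helper simply POSTs a caller-supplied JSON body to /_profiling/stacktraces. A sketch of invoking it through the request struct; the body shown is only a placeholder, since the accepted fields are defined by the Universal Profiling documentation rather than by this generated client code:

package main

import (
	"context"
	"io"
	"log"
	"os"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("creating client: %s", err)
	}

	// Placeholder request body; consult the Universal Profiling docs for the
	// exact fields (query, sample size, ...) this endpoint accepts.
	body := strings.NewReader(`{"query":{"range":{"@timestamp":{"gte":"now-15m"}}}}`)

	req := esapi.ProfilingStacktracesRequest{Body: body}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("profiling stacktraces: %s", err)
	}
	defer res.Body.Close()
	io.Copy(os.Stdout, res.Body)
}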
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.reindex.go b/esapi/api.reindex.go index a7fd0db7ef..ca485df69d 100644 --- a/esapi/api.reindex.go +++ b/esapi/api.reindex.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.reindex_rethrottle.go b/esapi/api.reindex_rethrottle.go index 5d60fa8693..bedc4e7fd7 100644 --- a/esapi/api.reindex_rethrottle.go +++ b/esapi/api.reindex_rethrottle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.render_search_template.go b/esapi/api.render_search_template.go index d3184bb408..5265423ed3 100644 --- a/esapi/api.render_search_template.go +++ b/esapi/api.render_search_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.scripts_painless_execute.go b/esapi/api.scripts_painless_execute.go index debfe9ad4d..0002f43381 100644 --- a/esapi/api.scripts_painless_execute.go +++ b/esapi/api.scripts_painless_execute.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.scroll.go b/esapi/api.scroll.go index cb413e7d07..fcf998a2d8 100644 --- a/esapi/api.scroll.go +++ b/esapi/api.scroll.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search.go b/esapi/api.search.go index d01c248327..cd4ba2d7a2 100644 --- a/esapi/api.search.go +++ b/esapi/api.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.delete.go b/esapi/api.search_application.delete.go index 8e20d52824..b0c35dd4eb 100644 --- a/esapi/api.search_application.delete.go +++ b/esapi/api.search_application.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.delete_behavioral_analytics.go b/esapi/api.search_application.delete_behavioral_analytics.go index 329f0576bc..6a4ade475c 100644 --- a/esapi/api.search_application.delete_behavioral_analytics.go +++ b/esapi/api.search_application.delete_behavioral_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.get.go b/esapi/api.search_application.get.go index 4619d6b3cf..85a7ebf7d4 100644 --- a/esapi/api.search_application.get.go +++ b/esapi/api.search_application.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.get_behavioral_analytics.go b/esapi/api.search_application.get_behavioral_analytics.go index cba2e12986..1d2e41fac1 100644 --- a/esapi/api.search_application.get_behavioral_analytics.go +++ b/esapi/api.search_application.get_behavioral_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.list.go b/esapi/api.search_application.list.go index 99fdc1e9ed..0ce836b58c 100644 --- a/esapi/api.search_application.list.go +++ b/esapi/api.search_application.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.post_behavioral_analytics_event.go b/esapi/api.search_application.post_behavioral_analytics_event.go index 65e56e5eff..04fc1a7f9b 100644 --- a/esapi/api.search_application.post_behavioral_analytics_event.go +++ b/esapi/api.search_application.post_behavioral_analytics_event.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.put.go b/esapi/api.search_application.put.go index 5836dac4d2..10e3bced48 100644 --- a/esapi/api.search_application.put.go +++ b/esapi/api.search_application.put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.put_behavioral_analytics.go b/esapi/api.search_application.put_behavioral_analytics.go index bf1d577034..f2eb52558d 100644 --- a/esapi/api.search_application.put_behavioral_analytics.go +++ b/esapi/api.search_application.put_behavioral_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.render_query.go b/esapi/api.search_application.render_query.go index 70ccbc7f27..617090526e 100644 --- a/esapi/api.search_application.render_query.go +++ b/esapi/api.search_application.render_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.search.go b/esapi/api.search_application.search.go index a86f2e5467..0b9564e01b 100644 --- a/esapi/api.search_application.search.go +++ b/esapi/api.search_application.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_mvt.go b/esapi/api.search_mvt.go index 499d9bb972..b29275d930 100644 --- a/esapi/api.search_mvt.go +++ b/esapi/api.search_mvt.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_shards.go b/esapi/api.search_shards.go index 7ff40dce36..7dd05d79bf 100644 --- a/esapi/api.search_shards.go +++ b/esapi/api.search_shards.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_template.go b/esapi/api.search_template.go index 9adcc223ac..269583b84b 100644 --- a/esapi/api.search_template.go +++ b/esapi/api.search_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.shutdown.delete_node.go b/esapi/api.shutdown.delete_node.go index 297afa1289..94db148f13 100644 --- a/esapi/api.shutdown.delete_node.go +++ b/esapi/api.shutdown.delete_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.shutdown.get_node.go b/esapi/api.shutdown.get_node.go index c57ebf0e9d..a8ea5cd4ec 100644 --- a/esapi/api.shutdown.get_node.go +++ b/esapi/api.shutdown.get_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.shutdown.put_node.go b/esapi/api.shutdown.put_node.go index 42606d828f..87f6296588 100644 --- a/esapi/api.shutdown.put_node.go +++ b/esapi/api.shutdown.put_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.simulate.ingest.go b/esapi/api.simulate.ingest.go index 8bce848649..f0a3c8d417 100644 --- a/esapi/api.simulate.ingest.go +++ b/esapi/api.simulate.ingest.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.cleanup_repository.go b/esapi/api.snapshot.cleanup_repository.go index 412229dde1..6e99023b90 100644 --- a/esapi/api.snapshot.cleanup_repository.go +++ b/esapi/api.snapshot.cleanup_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.clone.go b/esapi/api.snapshot.clone.go index ae81f6c738..cf21219af1 100644 --- a/esapi/api.snapshot.clone.go +++ b/esapi/api.snapshot.clone.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.create.go b/esapi/api.snapshot.create.go index 1e5ef66d76..3082947b38 100644 --- a/esapi/api.snapshot.create.go +++ b/esapi/api.snapshot.create.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.create_repository.go b/esapi/api.snapshot.create_repository.go index b00d3e8280..295bc6b545 100644 --- a/esapi/api.snapshot.create_repository.go +++ b/esapi/api.snapshot.create_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.delete.go b/esapi/api.snapshot.delete.go index 9db0b1e710..58f4b2735c 100644 --- a/esapi/api.snapshot.delete.go +++ b/esapi/api.snapshot.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.delete_repository.go b/esapi/api.snapshot.delete_repository.go index 4a3824e523..1c932ec1eb 100644 --- a/esapi/api.snapshot.delete_repository.go +++ b/esapi/api.snapshot.delete_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.get.go b/esapi/api.snapshot.get.go index 66e13afec6..23db446462 100644 --- a/esapi/api.snapshot.get.go +++ b/esapi/api.snapshot.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.get_repository.go b/esapi/api.snapshot.get_repository.go index b96e24fe30..82c4ed6372 100644 --- a/esapi/api.snapshot.get_repository.go +++ b/esapi/api.snapshot.get_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.repository_analyze.go b/esapi/api.snapshot.repository_analyze.go index 6b4d84867f..79d876095c 100644 --- a/esapi/api.snapshot.repository_analyze.go +++ b/esapi/api.snapshot.repository_analyze.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.restore.go b/esapi/api.snapshot.restore.go index 650840ec0f..c402746ed7 100644 --- a/esapi/api.snapshot.restore.go +++ b/esapi/api.snapshot.restore.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.status.go b/esapi/api.snapshot.status.go index 37f452f048..0844416758 100644 --- a/esapi/api.snapshot.status.go +++ b/esapi/api.snapshot.status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.verify_repository.go b/esapi/api.snapshot.verify_repository.go index 036d6342c4..595ba27141 100644 --- a/esapi/api.snapshot.verify_repository.go +++ b/esapi/api.snapshot.verify_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.delete_synonym.go b/esapi/api.synonyms.delete_synonym.go index da700a7292..828aeb1392 100644 --- a/esapi/api.synonyms.delete_synonym.go +++ b/esapi/api.synonyms.delete_synonym.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -44,8 +44,6 @@ func newSynonymsDeleteSynonymFunc(t Transport) SynonymsDeleteSynonym { // SynonymsDeleteSynonym deletes a synonym set // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-synonyms-set.html. 
type SynonymsDeleteSynonym func(id string, o ...func(*SynonymsDeleteSynonymRequest)) (*Response, error) diff --git a/esapi/api.synonyms.delete_synonym_rule.go b/esapi/api.synonyms.delete_synonym_rule.go index cd11870f0c..0eff28028c 100644 --- a/esapi/api.synonyms.delete_synonym_rule.go +++ b/esapi/api.synonyms.delete_synonym_rule.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -44,8 +44,6 @@ func newSynonymsDeleteSynonymRuleFunc(t Transport) SynonymsDeleteSynonymRule { // SynonymsDeleteSynonymRule deletes a synonym rule in a synonym set // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-synonym-rule.html. type SynonymsDeleteSynonymRule func(rule_id string, set_id string, o ...func(*SynonymsDeleteSynonymRuleRequest)) (*Response, error) diff --git a/esapi/api.synonyms.get_synonym.go b/esapi/api.synonyms.get_synonym.go index 5b405a33df..2bb42923f3 100644 --- a/esapi/api.synonyms.get_synonym.go +++ b/esapi/api.synonyms.get_synonym.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -45,8 +45,6 @@ func newSynonymsGetSynonymFunc(t Transport) SynonymsGetSynonym { // SynonymsGetSynonym retrieves a synonym set // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/get-synonyms-set.html. type SynonymsGetSynonym func(id string, o ...func(*SynonymsGetSynonymRequest)) (*Response, error) diff --git a/esapi/api.synonyms.get_synonym_rule.go b/esapi/api.synonyms.get_synonym_rule.go index d54fb8dcc0..2fce0e4716 100644 --- a/esapi/api.synonyms.get_synonym_rule.go +++ b/esapi/api.synonyms.get_synonym_rule.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -44,8 +44,6 @@ func newSynonymsGetSynonymRuleFunc(t Transport) SynonymsGetSynonymRule { // SynonymsGetSynonymRule retrieves a synonym rule from a synonym set // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/get-synonym-rule.html. type SynonymsGetSynonymRule func(rule_id string, set_id string, o ...func(*SynonymsGetSynonymRuleRequest)) (*Response, error) diff --git a/esapi/api.synonyms.get_synonyms_sets.go b/esapi/api.synonyms.get_synonyms_sets.go index fd9828e5f1..eba33df1ee 100644 --- a/esapi/api.synonyms.get_synonyms_sets.go +++ b/esapi/api.synonyms.get_synonyms_sets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -45,8 +45,6 @@ func newSynonymsGetSynonymsSetsFunc(t Transport) SynonymsGetSynonymsSets { // SynonymsGetSynonymsSets retrieves a summary of all defined synonym sets // -// This API is experimental. 
-// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/list-synonyms-sets.html. type SynonymsGetSynonymsSets func(o ...func(*SynonymsGetSynonymsSetsRequest)) (*Response, error) diff --git a/esapi/api.synonyms.put_synonym.go b/esapi/api.synonyms.put_synonym.go index d0274042b4..837b988544 100644 --- a/esapi/api.synonyms.put_synonym.go +++ b/esapi/api.synonyms.put_synonym.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -45,8 +45,6 @@ func newSynonymsPutSynonymFunc(t Transport) SynonymsPutSynonym { // SynonymsPutSynonym creates or updates a synonyms set // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonyms-set.html. type SynonymsPutSynonym func(id string, body io.Reader, o ...func(*SynonymsPutSynonymRequest)) (*Response, error) diff --git a/esapi/api.synonyms.put_synonym_rule.go b/esapi/api.synonyms.put_synonym_rule.go index 787ea534da..bdea3bf69b 100644 --- a/esapi/api.synonyms.put_synonym_rule.go +++ b/esapi/api.synonyms.put_synonym_rule.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -45,8 +45,6 @@ func newSynonymsPutSynonymRuleFunc(t Transport) SynonymsPutSynonymRule { // SynonymsPutSynonymRule creates or updates a synonym rule in a synonym set // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonym-rule.html. type SynonymsPutSynonymRule func(body io.Reader, rule_id string, set_id string, o ...func(*SynonymsPutSynonymRuleRequest)) (*Response, error) diff --git a/esapi/api.tasks.cancel.go b/esapi/api.tasks.cancel.go index 1f5db90a1a..002c42f3db 100644 --- a/esapi/api.tasks.cancel.go +++ b/esapi/api.tasks.cancel.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.tasks.get.go b/esapi/api.tasks.get.go index 06db008b49..9e2a12f406 100644 --- a/esapi/api.tasks.get.go +++ b/esapi/api.tasks.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.tasks.list.go b/esapi/api.tasks.list.go index c2a8e6ce17..6a2da2bfe4 100644 --- a/esapi/api.tasks.list.go +++ b/esapi/api.tasks.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.terms_enum.go b/esapi/api.terms_enum.go index 2592a24d35..3d24f409fd 100644 --- a/esapi/api.terms_enum.go +++ b/esapi/api.terms_enum.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
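The synonyms hunks above drop the "This API is experimental." notice from every synonyms endpoint, which indicates the endpoints are no longer flagged experimental in the 8.14 specification. A minimal sketch of the put-synonym call whose signature is shown above, reusing a client built as in the earlier ProfilingStacktraces sketch; the set id and rule contents are illustrative.

// Sketch: create or update a synonym set with SynonymsPutSynonym(id, body).
// Assumes imports "log" and "strings" plus the client construction shown earlier.
func putSynonymSet(es *elasticsearch.Client) error {
	body := strings.NewReader(`{"synonyms_set": [{"id": "rule-1", "synonyms": "laptop, notebook"}]}`)

	res, err := es.SynonymsPutSynonym("my-synonyms-set", body)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	log.Println(res.Status())
	return nil
}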
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.termvectors.go b/esapi/api.termvectors.go index 4661d937a8..221c085706 100644 --- a/esapi/api.termvectors.go +++ b/esapi/api.termvectors.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.update.go b/esapi/api.update.go index 154ec58d6c..f1a52fa368 100644 --- a/esapi/api.update.go +++ b/esapi/api.update.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.update_by_query.go b/esapi/api.update_by_query.go index 21fda91eed..5eab11fc10 100644 --- a/esapi/api.update_by_query.go +++ b/esapi/api.update_by_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.update_by_query_rethrottle.go b/esapi/api.update_by_query_rethrottle.go index a117997c11..b252953860 100644 --- a/esapi/api.update_by_query_rethrottle.go +++ b/esapi/api.update_by_query_rethrottle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.delete.go b/esapi/api.xpack.async_search.delete.go index c383fb1fde..61727ee0e0 100644 --- a/esapi/api.xpack.async_search.delete.go +++ b/esapi/api.xpack.async_search.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.get.go b/esapi/api.xpack.async_search.get.go index 667c3599b8..40db490bd7 100644 --- a/esapi/api.xpack.async_search.get.go +++ b/esapi/api.xpack.async_search.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.status.go b/esapi/api.xpack.async_search.status.go index 81bbcd4671..2c3c03229a 100644 --- a/esapi/api.xpack.async_search.status.go +++ b/esapi/api.xpack.async_search.status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newAsyncSearchStatusFunc(t Transport) AsyncSearchStatus { @@ -51,6 +52,8 @@ type AsyncSearchStatus func(id string, o ...func(*AsyncSearchStatusRequest)) (*R type AsyncSearchStatusRequest struct { DocumentID string + KeepAlive time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r AsyncSearchStatusRequest) Do(providedCtx context.Context, transport Tran params = make(map[string]string) + if r.KeepAlive != 0 { + params["keep_alive"] = formatDuration(r.KeepAlive) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f AsyncSearchStatus) WithContext(v context.Context) func(*AsyncSearchStatu } } +// WithKeepAlive - specify the time interval in which the results (partial or final) for this search will be available. +func (f AsyncSearchStatus) WithKeepAlive(v time.Duration) func(*AsyncSearchStatusRequest) { + return func(r *AsyncSearchStatusRequest) { + r.KeepAlive = v + } +} + // WithPretty makes the response body pretty-printed. func (f AsyncSearchStatus) WithPretty() func(*AsyncSearchStatusRequest) { return func(r *AsyncSearchStatusRequest) { diff --git a/esapi/api.xpack.async_search.submit.go b/esapi/api.xpack.async_search.submit.go index e0d157599e..a97f56e5be 100644 --- a/esapi/api.xpack.async_search.submit.go +++ b/esapi/api.xpack.async_search.submit.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go b/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go index e42a81cd24..80e1a9da1b 100644 --- a/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go +++ b/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go b/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go index ce59f21c7c..55af74010d 100644 --- a/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go +++ b/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.autoscaling.get_autoscaling_policy.go b/esapi/api.xpack.autoscaling.get_autoscaling_policy.go index 2f37533483..b287d76cd5 100644 --- a/esapi/api.xpack.autoscaling.get_autoscaling_policy.go +++ b/esapi/api.xpack.autoscaling.get_autoscaling_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
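The async_search.status hunk above adds a keep_alive query parameter and the matching WithKeepAlive option. A minimal sketch of checking a previously submitted async search while extending how long its results are retained; it assumes the grouped accessor es.AsyncSearch.Status (not shown in this diff) and a placeholder search id.

// Sketch: poll an async search's status and extend result retention.
// Assumes imports "context", "time", "github.com/elastic/go-elasticsearch/v8"
// and "github.com/elastic/go-elasticsearch/v8/esapi"; id is a placeholder.
func asyncSearchStatus(es *elasticsearch.Client, id string) (*esapi.Response, error) {
	return es.AsyncSearch.Status(
		id,
		es.AsyncSearch.Status.WithContext(context.Background()),
		es.AsyncSearch.Status.WithKeepAlive(5*time.Minute), // new keep_alive parameter
	)
}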
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.autoscaling.put_autoscaling_policy.go b/esapi/api.xpack.autoscaling.put_autoscaling_policy.go index 8848c727fe..cd8cb4159b 100644 --- a/esapi/api.xpack.autoscaling.put_autoscaling_policy.go +++ b/esapi/api.xpack.autoscaling.put_autoscaling_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_data_frame_analytics.go b/esapi/api.xpack.cat.ml_data_frame_analytics.go index 59b1c3f2ef..752e0bac6b 100644 --- a/esapi/api.xpack.cat.ml_data_frame_analytics.go +++ b/esapi/api.xpack.cat.ml_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_datafeeds.go b/esapi/api.xpack.cat.ml_datafeeds.go index 23455c8484..de8dae78a3 100644 --- a/esapi/api.xpack.cat.ml_datafeeds.go +++ b/esapi/api.xpack.cat.ml_datafeeds.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_jobs.go b/esapi/api.xpack.cat.ml_jobs.go index c507506860..b6703af90b 100644 --- a/esapi/api.xpack.cat.ml_jobs.go +++ b/esapi/api.xpack.cat.ml_jobs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_trained_models.go b/esapi/api.xpack.cat.ml_trained_models.go index f418fc9507..411ca5bd7d 100644 --- a/esapi/api.xpack.cat.ml_trained_models.go +++ b/esapi/api.xpack.cat.ml_trained_models.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.transforms.go b/esapi/api.xpack.cat.transforms.go index 1421e0a492..1d43f6ad18 100644 --- a/esapi/api.xpack.cat.transforms.go +++ b/esapi/api.xpack.cat.transforms.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.delete_auto_follow_pattern.go b/esapi/api.xpack.ccr.delete_auto_follow_pattern.go index ad367987ee..edcbdb39e8 100644 --- a/esapi/api.xpack.ccr.delete_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.delete_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.follow.go b/esapi/api.xpack.ccr.follow.go index 487b9c1ddd..96833d2417 100644 --- a/esapi/api.xpack.ccr.follow.go +++ b/esapi/api.xpack.ccr.follow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.follow_info.go b/esapi/api.xpack.ccr.follow_info.go index 795525d822..8b6d4d71a1 100644 --- a/esapi/api.xpack.ccr.follow_info.go +++ b/esapi/api.xpack.ccr.follow_info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.follow_stats.go b/esapi/api.xpack.ccr.follow_stats.go index 420c0d41c2..469e8b3d6e 100644 --- a/esapi/api.xpack.ccr.follow_stats.go +++ b/esapi/api.xpack.ccr.follow_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.forget_follower.go b/esapi/api.xpack.ccr.forget_follower.go index 91024de73b..14eb812443 100644 --- a/esapi/api.xpack.ccr.forget_follower.go +++ b/esapi/api.xpack.ccr.forget_follower.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.get_auto_follow_pattern.go b/esapi/api.xpack.ccr.get_auto_follow_pattern.go index 8922a37b4f..1e64c562a1 100644 --- a/esapi/api.xpack.ccr.get_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.get_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.pause_auto_follow_pattern.go b/esapi/api.xpack.ccr.pause_auto_follow_pattern.go index 7722767073..ec926df6d1 100644 --- a/esapi/api.xpack.ccr.pause_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.pause_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.pause_follow.go b/esapi/api.xpack.ccr.pause_follow.go index a44e9fccbc..c9238deb9b 100644 --- a/esapi/api.xpack.ccr.pause_follow.go +++ b/esapi/api.xpack.ccr.pause_follow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.put_auto_follow_pattern.go b/esapi/api.xpack.ccr.put_auto_follow_pattern.go index 47d85c6c28..163d642162 100644 --- a/esapi/api.xpack.ccr.put_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.put_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.resume_auto_follow_pattern.go b/esapi/api.xpack.ccr.resume_auto_follow_pattern.go index 17f4fd0fc2..a921f57431 100644 --- a/esapi/api.xpack.ccr.resume_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.resume_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.resume_follow.go b/esapi/api.xpack.ccr.resume_follow.go index 7c654961df..f80c62d8da 100644 --- a/esapi/api.xpack.ccr.resume_follow.go +++ b/esapi/api.xpack.ccr.resume_follow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.stats.go b/esapi/api.xpack.ccr.stats.go index 701e8ecd74..4d3746fb28 100644 --- a/esapi/api.xpack.ccr.stats.go +++ b/esapi/api.xpack.ccr.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.unfollow.go b/esapi/api.xpack.ccr.unfollow.go index fc1ec6dff2..174ff55b54 100644 --- a/esapi/api.xpack.ccr.unfollow.go +++ b/esapi/api.xpack.ccr.unfollow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.close_point_in_time.go b/esapi/api.xpack.close_point_in_time.go index 00503ce032..d875fca385 100644 --- a/esapi/api.xpack.close_point_in_time.go +++ b/esapi/api.xpack.close_point_in_time.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.enrich.delete_policy.go b/esapi/api.xpack.enrich.delete_policy.go index ae661a788d..ab842a6755 100644 --- a/esapi/api.xpack.enrich.delete_policy.go +++ b/esapi/api.xpack.enrich.delete_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.enrich.execute_policy.go b/esapi/api.xpack.enrich.execute_policy.go index e1be21fd1c..49bce41da1 100644 --- a/esapi/api.xpack.enrich.execute_policy.go +++ b/esapi/api.xpack.enrich.execute_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.enrich.get_policy.go b/esapi/api.xpack.enrich.get_policy.go index cfee7ae2b6..1401c2f25e 100644 --- a/esapi/api.xpack.enrich.get_policy.go +++ b/esapi/api.xpack.enrich.get_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.enrich.put_policy.go b/esapi/api.xpack.enrich.put_policy.go index bae375ce37..8b1b8b9b3e 100644 --- a/esapi/api.xpack.enrich.put_policy.go +++ b/esapi/api.xpack.enrich.put_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.enrich.stats.go b/esapi/api.xpack.enrich.stats.go index 7184653910..6283c9afb4 100644 --- a/esapi/api.xpack.enrich.stats.go +++ b/esapi/api.xpack.enrich.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.delete.go b/esapi/api.xpack.eql.delete.go index 10aaff33a2..5964d6d21f 100644 --- a/esapi/api.xpack.eql.delete.go +++ b/esapi/api.xpack.eql.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.get.go b/esapi/api.xpack.eql.get.go index 518aafec83..b6e2dcd954 100644 --- a/esapi/api.xpack.eql.get.go +++ b/esapi/api.xpack.eql.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.get_status.go b/esapi/api.xpack.eql.get_status.go index f11e6e75c9..94da2eb425 100644 --- a/esapi/api.xpack.eql.get_status.go +++ b/esapi/api.xpack.eql.get_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.search.go b/esapi/api.xpack.eql.search.go index 07df8ddc8f..3c2fcc2e7a 100644 --- a/esapi/api.xpack.eql.search.go +++ b/esapi/api.xpack.eql.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.esql.async_query.go b/esapi/api.xpack.esql.async_query.go new file mode 100644 index 0000000000..1229bb8905 --- /dev/null +++ b/esapi/api.xpack.esql.async_query.go @@ -0,0 +1,265 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strconv" + "strings" +) + +func newEsqlAsyncQueryFunc(t Transport) EsqlAsyncQuery { + return func(body io.Reader, o ...func(*EsqlAsyncQueryRequest)) (*Response, error) { + var r = EsqlAsyncQueryRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// EsqlAsyncQuery - Executes an ESQL request asynchronously +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-api.html. +type EsqlAsyncQuery func(body io.Reader, o ...func(*EsqlAsyncQueryRequest)) (*Response, error) + +// EsqlAsyncQueryRequest configures the Esql Async Query API request. +type EsqlAsyncQueryRequest struct { + Body io.Reader + + Delimiter string + DropNullColumns *bool + Format string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r EsqlAsyncQueryRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "esql.async_query") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_query/async")) + path.WriteString("http://") + path.WriteString("/_query/async") + + params = make(map[string]string) + + if r.Delimiter != "" { + params["delimiter"] = r.Delimiter + } + + if r.DropNullColumns != nil { + params["drop_null_columns"] = strconv.FormatBool(*r.DropNullColumns) + } + + if r.Format != "" { + params["format"] = r.Format + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "esql.async_query") + if reader := instrument.RecordRequestBody(ctx, "esql.async_query", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "esql.async_query") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f EsqlAsyncQuery) WithContext(v context.Context) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.ctx = v + } +} + +// WithDelimiter - the character to use between values within a csv row. only valid for the csv format.. +func (f EsqlAsyncQuery) WithDelimiter(v string) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.Delimiter = v + } +} + +// WithDropNullColumns - should entirely null columns be removed from the results? their name and type will be returning in a new `all_columns` section.. +func (f EsqlAsyncQuery) WithDropNullColumns(v bool) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.DropNullColumns = &v + } +} + +// WithFormat - a short version of the accept header, e.g. json, yaml. +func (f EsqlAsyncQuery) WithFormat(v string) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.Format = v + } +} + +// WithPretty makes the response body pretty-printed. 
+func (f EsqlAsyncQuery) WithPretty() func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f EsqlAsyncQuery) WithHuman() func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f EsqlAsyncQuery) WithErrorTrace() func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f EsqlAsyncQuery) WithFilterPath(v ...string) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f EsqlAsyncQuery) WithHeader(h map[string]string) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f EsqlAsyncQuery) WithOpaqueID(s string) func(*EsqlAsyncQueryRequest) { + return func(r *EsqlAsyncQueryRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.esql.async_query_get.go b/esapi/api.xpack.esql.async_query_get.go new file mode 100644 index 0000000000..e665359cb4 --- /dev/null +++ b/esapi/api.xpack.esql.async_query_get.go @@ -0,0 +1,264 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strconv" + "strings" + "time" +) + +func newEsqlAsyncQueryGetFunc(t Transport) EsqlAsyncQueryGet { + return func(id string, o ...func(*EsqlAsyncQueryGetRequest)) (*Response, error) { + var r = EsqlAsyncQueryGetRequest{DocumentID: id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// EsqlAsyncQueryGet - Retrieves the results of a previously submitted async query request given its ID. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-get-api.html. +type EsqlAsyncQueryGet func(id string, o ...func(*EsqlAsyncQueryGetRequest)) (*Response, error) + +// EsqlAsyncQueryGetRequest configures the Esql Async Query Get API request. 
+type EsqlAsyncQueryGetRequest struct { + DocumentID string + + DropNullColumns *bool + KeepAlive time.Duration + WaitForCompletionTimeout time.Duration + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r EsqlAsyncQueryGetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "esql.async_query_get") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + path.Grow(7 + 1 + len("_query") + 1 + len("async") + 1 + len(r.DocumentID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_query") + path.WriteString("/") + path.WriteString("async") + path.WriteString("/") + path.WriteString(r.DocumentID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "id", r.DocumentID) + } + + params = make(map[string]string) + + if r.DropNullColumns != nil { + params["drop_null_columns"] = strconv.FormatBool(*r.DropNullColumns) + } + + if r.KeepAlive != 0 { + params["keep_alive"] = formatDuration(r.KeepAlive) + } + + if r.WaitForCompletionTimeout != 0 { + params["wait_for_completion_timeout"] = formatDuration(r.WaitForCompletionTimeout) + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "esql.async_query_get") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "esql.async_query_get") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f EsqlAsyncQueryGet) WithContext(v context.Context) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.ctx = v + } +} + +// WithDropNullColumns - should entirely null columns be removed from the results? their name and type will be returning in a new `all_columns` section.. 
+func (f EsqlAsyncQueryGet) WithDropNullColumns(v bool) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.DropNullColumns = &v + } +} + +// WithKeepAlive - specify the time interval in which the results (partial or final) for this search will be available. +func (f EsqlAsyncQueryGet) WithKeepAlive(v time.Duration) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.KeepAlive = v + } +} + +// WithWaitForCompletionTimeout - specify the time that the request should block waiting for the final response. +func (f EsqlAsyncQueryGet) WithWaitForCompletionTimeout(v time.Duration) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.WaitForCompletionTimeout = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f EsqlAsyncQueryGet) WithPretty() func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f EsqlAsyncQueryGet) WithHuman() func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f EsqlAsyncQueryGet) WithErrorTrace() func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f EsqlAsyncQueryGet) WithFilterPath(v ...string) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f EsqlAsyncQueryGet) WithHeader(h map[string]string) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f EsqlAsyncQueryGet) WithOpaqueID(s string) func(*EsqlAsyncQueryGetRequest) { + return func(r *EsqlAsyncQueryGetRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.esql.query.go b/esapi/api.xpack.esql.query.go index 9fc4dfd727..cb256e2679 100644 --- a/esapi/api.xpack.esql.query.go +++ b/esapi/api.xpack.esql.query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "io" "net/http" + "strconv" "strings" ) @@ -54,8 +55,9 @@ type EsqlQuery func(body io.Reader, o ...func(*EsqlQueryRequest)) (*Response, er type EsqlQueryRequest struct { Body io.Reader - Delimiter string - Format string + Delimiter string + DropNullColumns *bool + Format string Pretty bool Human bool @@ -98,6 +100,10 @@ func (r EsqlQueryRequest) Do(providedCtx context.Context, transport Transport) ( params["delimiter"] = r.Delimiter } + if r.DropNullColumns != nil { + params["drop_null_columns"] = strconv.FormatBool(*r.DropNullColumns) + } + if r.Format != "" { params["format"] = r.Format } @@ -194,6 +200,13 @@ func (f EsqlQuery) WithDelimiter(v string) func(*EsqlQueryRequest) { } } +// WithDropNullColumns - should entirely null columns be removed from the results? their name and type will be returning in a new `all_columns` section.. 
+func (f EsqlQuery) WithDropNullColumns(v bool) func(*EsqlQueryRequest) { + return func(r *EsqlQueryRequest) { + r.DropNullColumns = &v + } +} + // WithFormat - a short version of the accept header, e.g. json, yaml. func (f EsqlQuery) WithFormat(v string) func(*EsqlQueryRequest) { return func(r *EsqlQueryRequest) { diff --git a/esapi/api.xpack.graph.explore.go b/esapi/api.xpack.graph.explore.go index 868fa9fb20..5596006ec1 100644 --- a/esapi/api.xpack.graph.explore.go +++ b/esapi/api.xpack.graph.explore.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.delete_lifecycle.go b/esapi/api.xpack.ilm.delete_lifecycle.go index 51b1baba62..85a096dfdc 100644 --- a/esapi/api.xpack.ilm.delete_lifecycle.go +++ b/esapi/api.xpack.ilm.delete_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.explain_lifecycle.go b/esapi/api.xpack.ilm.explain_lifecycle.go index a4e0d87a27..057139de7f 100644 --- a/esapi/api.xpack.ilm.explain_lifecycle.go +++ b/esapi/api.xpack.ilm.explain_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.get_lifecycle.go b/esapi/api.xpack.ilm.get_lifecycle.go index e427fa7d67..7d418e6e42 100644 --- a/esapi/api.xpack.ilm.get_lifecycle.go +++ b/esapi/api.xpack.ilm.get_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.get_status.go b/esapi/api.xpack.ilm.get_status.go index 92422ead1f..05fb3acf39 100644 --- a/esapi/api.xpack.ilm.get_status.go +++ b/esapi/api.xpack.ilm.get_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.migrate_to_data_tiers.go b/esapi/api.xpack.ilm.migrate_to_data_tiers.go index d00ad3c7e4..a6c8cb684a 100644 --- a/esapi/api.xpack.ilm.migrate_to_data_tiers.go +++ b/esapi/api.xpack.ilm.migrate_to_data_tiers.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.move_to_step.go b/esapi/api.xpack.ilm.move_to_step.go index 09c1a405fa..00396f352c 100644 --- a/esapi/api.xpack.ilm.move_to_step.go +++ b/esapi/api.xpack.ilm.move_to_step.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
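The two ES|QL changes above (the new EsqlAsyncQueryGet endpoint and the drop_null_columns parameter added to EsqlQuery) can be exercised through the generated request structs shown in the diff. The sketch below is illustrative only and is not part of the generated code: the default client address, the index name "sample-index", the ES|QL query text, and the async query ID are placeholder assumptions.

package main

import (
	"context"
	"log"
	"strings"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	// Assumes an Elasticsearch cluster reachable via the client defaults.
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}
	ctx := context.Background()
	drop := true // feeds the new drop_null_columns query parameter

	// Synchronous ES|QL query, asking the server to remove entirely-null columns.
	res, err := esapi.EsqlQueryRequest{
		Body:            strings.NewReader(`{"query": "FROM sample-index | LIMIT 10"}`),
		DropNullColumns: &drop,
	}.Do(ctx, es)
	if err != nil {
		log.Fatalf("ES|QL query failed: %s", err)
	}
	defer res.Body.Close()
	log.Println("esql.query:", res.Status())

	// Retrieve the results of a previously submitted async ES|QL query by ID.
	// "FkJ...placeholder" stands in for the ID returned when the query was submitted.
	getRes, err := esapi.EsqlAsyncQueryGetRequest{
		DocumentID:               "FkJ...placeholder",
		WaitForCompletionTimeout: 2 * time.Second,
		DropNullColumns:          &drop,
	}.Do(ctx, es)
	if err != nil {
		log.Fatalf("esql.async_query_get failed: %s", err)
	}
	defer getRes.Body.Close()
	log.Println("esql.async_query_get:", getRes.Status())
}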
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.put_lifecycle.go b/esapi/api.xpack.ilm.put_lifecycle.go index e5db200fa1..4ebe7d3a28 100644 --- a/esapi/api.xpack.ilm.put_lifecycle.go +++ b/esapi/api.xpack.ilm.put_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.remove_policy.go b/esapi/api.xpack.ilm.remove_policy.go index 1af1d7d388..a684146824 100644 --- a/esapi/api.xpack.ilm.remove_policy.go +++ b/esapi/api.xpack.ilm.remove_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.retry.go b/esapi/api.xpack.ilm.retry.go index 629221e271..bb8db09f78 100644 --- a/esapi/api.xpack.ilm.retry.go +++ b/esapi/api.xpack.ilm.retry.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.start.go b/esapi/api.xpack.ilm.start.go index 05dbae2066..7972e2d811 100644 --- a/esapi/api.xpack.ilm.start.go +++ b/esapi/api.xpack.ilm.start.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.stop.go b/esapi/api.xpack.ilm.stop.go index 37bca4ba4e..83cf478b94 100644 --- a/esapi/api.xpack.ilm.stop.go +++ b/esapi/api.xpack.ilm.stop.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.create_data_stream.go b/esapi/api.xpack.indices.create_data_stream.go index 719b456a96..b45cb5536d 100644 --- a/esapi/api.xpack.indices.create_data_stream.go +++ b/esapi/api.xpack.indices.create_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.data_streams_stats.go b/esapi/api.xpack.indices.data_streams_stats.go index d66be82916..2bc2b029e2 100644 --- a/esapi/api.xpack.indices.data_streams_stats.go +++ b/esapi/api.xpack.indices.data_streams_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.delete_data_stream.go b/esapi/api.xpack.indices.delete_data_stream.go index 12f64b4bae..e6f88cce4a 100644 --- a/esapi/api.xpack.indices.delete_data_stream.go +++ b/esapi/api.xpack.indices.delete_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.get_data_stream.go b/esapi/api.xpack.indices.get_data_stream.go index 9de2c37088..8a2b70f5c3 100644 --- a/esapi/api.xpack.indices.get_data_stream.go +++ b/esapi/api.xpack.indices.get_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.migrate_to_data_stream.go b/esapi/api.xpack.indices.migrate_to_data_stream.go index c67026cc52..306df6f90d 100644 --- a/esapi/api.xpack.indices.migrate_to_data_stream.go +++ b/esapi/api.xpack.indices.migrate_to_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.promote_data_stream.go b/esapi/api.xpack.indices.promote_data_stream.go index b9b7404e15..fc51510728 100644 --- a/esapi/api.xpack.indices.promote_data_stream.go +++ b/esapi/api.xpack.indices.promote_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.reload_search_analyzers.go b/esapi/api.xpack.indices.reload_search_analyzers.go index 88b8e223d9..d471cdd9a3 100644 --- a/esapi/api.xpack.indices.reload_search_analyzers.go +++ b/esapi/api.xpack.indices.reload_search_analyzers.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.unfreeze.go b/esapi/api.xpack.indices.unfreeze.go index 0608671b66..e50d6d0e19 100644 --- a/esapi/api.xpack.indices.unfreeze.go +++ b/esapi/api.xpack.indices.unfreeze.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.delete.go b/esapi/api.xpack.license.delete.go index 75f6d1a0c0..abbb52902c 100644 --- a/esapi/api.xpack.license.delete.go +++ b/esapi/api.xpack.license.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.get.go b/esapi/api.xpack.license.get.go index 1a9a903007..b13c842000 100644 --- a/esapi/api.xpack.license.get.go +++ b/esapi/api.xpack.license.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.get_basic_status.go b/esapi/api.xpack.license.get_basic_status.go index 638c5a6d7c..f51563a73a 100644 --- a/esapi/api.xpack.license.get_basic_status.go +++ b/esapi/api.xpack.license.get_basic_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.get_trial_status.go b/esapi/api.xpack.license.get_trial_status.go index 9f5d750e2a..328cf1bf88 100644 --- a/esapi/api.xpack.license.get_trial_status.go +++ b/esapi/api.xpack.license.get_trial_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.post.go b/esapi/api.xpack.license.post.go index 95055bf6f4..e30f2b2ebf 100644 --- a/esapi/api.xpack.license.post.go +++ b/esapi/api.xpack.license.post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.post_start_basic.go b/esapi/api.xpack.license.post_start_basic.go index 7d830a82fe..dc2aff5a71 100644 --- a/esapi/api.xpack.license.post_start_basic.go +++ b/esapi/api.xpack.license.post_start_basic.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.post_start_trial.go b/esapi/api.xpack.license.post_start_trial.go index 3f2e1fe090..6bd1eb97fb 100644 --- a/esapi/api.xpack.license.post_start_trial.go +++ b/esapi/api.xpack.license.post_start_trial.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.logstash.delete_pipeline.go b/esapi/api.xpack.logstash.delete_pipeline.go index 4567b09cec..c7eb3e4849 100644 --- a/esapi/api.xpack.logstash.delete_pipeline.go +++ b/esapi/api.xpack.logstash.delete_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.logstash.get_pipeline.go b/esapi/api.xpack.logstash.get_pipeline.go index 770d956eae..eba3e555b9 100644 --- a/esapi/api.xpack.logstash.get_pipeline.go +++ b/esapi/api.xpack.logstash.get_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.logstash.put_pipeline.go b/esapi/api.xpack.logstash.put_pipeline.go index 13553e20bc..849cfb6490 100644 --- a/esapi/api.xpack.logstash.put_pipeline.go +++ b/esapi/api.xpack.logstash.put_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.migration.deprecations.go b/esapi/api.xpack.migration.deprecations.go index 1837ea5be4..372967f688 100644 --- a/esapi/api.xpack.migration.deprecations.go +++ b/esapi/api.xpack.migration.deprecations.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.migration.get_feature_upgrade_status.go b/esapi/api.xpack.migration.get_feature_upgrade_status.go index e7fb42ca01..4b76a1bcbd 100644 --- a/esapi/api.xpack.migration.get_feature_upgrade_status.go +++ b/esapi/api.xpack.migration.get_feature_upgrade_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.migration.post_feature_upgrade.go b/esapi/api.xpack.migration.post_feature_upgrade.go index 72b5dbb5e2..df06c2cc8f 100644 --- a/esapi/api.xpack.migration.post_feature_upgrade.go +++ b/esapi/api.xpack.migration.post_feature_upgrade.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go b/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go index 66a3594d77..c2ffde1af3 100644 --- a/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go +++ b/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.close_job.go b/esapi/api.xpack.ml.close_job.go index dbb041ed79..65537098b1 100644 --- a/esapi/api.xpack.ml.close_job.go +++ b/esapi/api.xpack.ml.close_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_calendar.go b/esapi/api.xpack.ml.delete_calendar.go index 7261ea2ad4..bd090d9c9b 100644 --- a/esapi/api.xpack.ml.delete_calendar.go +++ b/esapi/api.xpack.ml.delete_calendar.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_calendar_event.go b/esapi/api.xpack.ml.delete_calendar_event.go index 6604a80d45..4562aab922 100644 --- a/esapi/api.xpack.ml.delete_calendar_event.go +++ b/esapi/api.xpack.ml.delete_calendar_event.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_calendar_job.go b/esapi/api.xpack.ml.delete_calendar_job.go index f15a266b25..63a71a5c7d 100644 --- a/esapi/api.xpack.ml.delete_calendar_job.go +++ b/esapi/api.xpack.ml.delete_calendar_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_data_frame_analytics.go b/esapi/api.xpack.ml.delete_data_frame_analytics.go index 2538cb4f2d..2723219297 100644 --- a/esapi/api.xpack.ml.delete_data_frame_analytics.go +++ b/esapi/api.xpack.ml.delete_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_datafeed.go b/esapi/api.xpack.ml.delete_datafeed.go index 09012b24b9..1f2307c4fc 100644 --- a/esapi/api.xpack.ml.delete_datafeed.go +++ b/esapi/api.xpack.ml.delete_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_expired_data.go b/esapi/api.xpack.ml.delete_expired_data.go index 868c365101..ccd4b6dd72 100644 --- a/esapi/api.xpack.ml.delete_expired_data.go +++ b/esapi/api.xpack.ml.delete_expired_data.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_filter.go b/esapi/api.xpack.ml.delete_filter.go index e88ba7d8d4..ddfb85c5f3 100644 --- a/esapi/api.xpack.ml.delete_filter.go +++ b/esapi/api.xpack.ml.delete_filter.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_forecast.go b/esapi/api.xpack.ml.delete_forecast.go index 9667840e7b..d7132ad170 100644 --- a/esapi/api.xpack.ml.delete_forecast.go +++ b/esapi/api.xpack.ml.delete_forecast.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_job.go b/esapi/api.xpack.ml.delete_job.go index b4c82a97b8..6632c8de52 100644 --- a/esapi/api.xpack.ml.delete_job.go +++ b/esapi/api.xpack.ml.delete_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_model_snapshot.go b/esapi/api.xpack.ml.delete_model_snapshot.go index 0ecbc34cb1..d489e8db05 100644 --- a/esapi/api.xpack.ml.delete_model_snapshot.go +++ b/esapi/api.xpack.ml.delete_model_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_trained_model.go b/esapi/api.xpack.ml.delete_trained_model.go index 38a299e708..1abe93a31b 100644 --- a/esapi/api.xpack.ml.delete_trained_model.go +++ b/esapi/api.xpack.ml.delete_trained_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_trained_model_alias.go b/esapi/api.xpack.ml.delete_trained_model_alias.go index 37a0cab977..3229ae90a8 100644 --- a/esapi/api.xpack.ml.delete_trained_model_alias.go +++ b/esapi/api.xpack.ml.delete_trained_model_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.estimate_model_memory.go b/esapi/api.xpack.ml.estimate_model_memory.go index 9aac0ac624..7b76bd03ca 100644 --- a/esapi/api.xpack.ml.estimate_model_memory.go +++ b/esapi/api.xpack.ml.estimate_model_memory.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.evaluate_data_frame.go b/esapi/api.xpack.ml.evaluate_data_frame.go index d2da56d5e1..a08de4b6db 100644 --- a/esapi/api.xpack.ml.evaluate_data_frame.go +++ b/esapi/api.xpack.ml.evaluate_data_frame.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.explain_data_frame_analytics.go b/esapi/api.xpack.ml.explain_data_frame_analytics.go index 5b990b2cbd..860eab832c 100644 --- a/esapi/api.xpack.ml.explain_data_frame_analytics.go +++ b/esapi/api.xpack.ml.explain_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.flush_job.go b/esapi/api.xpack.ml.flush_job.go index 73f8315557..a76d497ff7 100644 --- a/esapi/api.xpack.ml.flush_job.go +++ b/esapi/api.xpack.ml.flush_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.forecast.go b/esapi/api.xpack.ml.forecast.go index 74908ad3f1..737b5f0733 100644 --- a/esapi/api.xpack.ml.forecast.go +++ b/esapi/api.xpack.ml.forecast.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_buckets.go b/esapi/api.xpack.ml.get_buckets.go index eb59ba4c7f..d671264466 100644 --- a/esapi/api.xpack.ml.get_buckets.go +++ b/esapi/api.xpack.ml.get_buckets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_calendar_events.go b/esapi/api.xpack.ml.get_calendar_events.go index ea3287894b..87de19fd1e 100644 --- a/esapi/api.xpack.ml.get_calendar_events.go +++ b/esapi/api.xpack.ml.get_calendar_events.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_calendars.go b/esapi/api.xpack.ml.get_calendars.go index a1584e629d..2207bd8c51 100644 --- a/esapi/api.xpack.ml.get_calendars.go +++ b/esapi/api.xpack.ml.get_calendars.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_categories.go b/esapi/api.xpack.ml.get_categories.go index 6929c2e892..9ac0cbb79e 100644 --- a/esapi/api.xpack.ml.get_categories.go +++ b/esapi/api.xpack.ml.get_categories.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_data_frame_analytics.go b/esapi/api.xpack.ml.get_data_frame_analytics.go index 4c5bc2e1e5..ef68e97116 100644 --- a/esapi/api.xpack.ml.get_data_frame_analytics.go +++ b/esapi/api.xpack.ml.get_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_data_frame_analytics_stats.go b/esapi/api.xpack.ml.get_data_frame_analytics_stats.go index 694afae3eb..5c937156bb 100644 --- a/esapi/api.xpack.ml.get_data_frame_analytics_stats.go +++ b/esapi/api.xpack.ml.get_data_frame_analytics_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_datafeed_stats.go b/esapi/api.xpack.ml.get_datafeed_stats.go index 2c647426c7..bee19c5c53 100644 --- a/esapi/api.xpack.ml.get_datafeed_stats.go +++ b/esapi/api.xpack.ml.get_datafeed_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_datafeeds.go b/esapi/api.xpack.ml.get_datafeeds.go index 1c1171bdee..b16e787ee3 100644 --- a/esapi/api.xpack.ml.get_datafeeds.go +++ b/esapi/api.xpack.ml.get_datafeeds.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_filters.go b/esapi/api.xpack.ml.get_filters.go index db045cba61..c673814aa3 100644 --- a/esapi/api.xpack.ml.get_filters.go +++ b/esapi/api.xpack.ml.get_filters.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_influencers.go b/esapi/api.xpack.ml.get_influencers.go index 743702b641..877f5b0c14 100644 --- a/esapi/api.xpack.ml.get_influencers.go +++ b/esapi/api.xpack.ml.get_influencers.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_job_stats.go b/esapi/api.xpack.ml.get_job_stats.go index 066654e4de..c596a2bde1 100644 --- a/esapi/api.xpack.ml.get_job_stats.go +++ b/esapi/api.xpack.ml.get_job_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_jobs.go b/esapi/api.xpack.ml.get_jobs.go index 6b345a5e8d..ba0a801ba8 100644 --- a/esapi/api.xpack.ml.get_jobs.go +++ b/esapi/api.xpack.ml.get_jobs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_memory_stats.go b/esapi/api.xpack.ml.get_memory_stats.go index 07c5bbfc4a..6646333883 100644 --- a/esapi/api.xpack.ml.get_memory_stats.go +++ b/esapi/api.xpack.ml.get_memory_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go b/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go index edfefed824..f71ad28324 100644 --- a/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go +++ b/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_model_snapshots.go b/esapi/api.xpack.ml.get_model_snapshots.go index 14001c279d..b49e1d5090 100644 --- a/esapi/api.xpack.ml.get_model_snapshots.go +++ b/esapi/api.xpack.ml.get_model_snapshots.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_overall_buckets.go b/esapi/api.xpack.ml.get_overall_buckets.go index bf5d5c0423..2c3f394b1b 100644 --- a/esapi/api.xpack.ml.get_overall_buckets.go +++ b/esapi/api.xpack.ml.get_overall_buckets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_records.go b/esapi/api.xpack.ml.get_records.go index 6b19524518..57093fe798 100644 --- a/esapi/api.xpack.ml.get_records.go +++ b/esapi/api.xpack.ml.get_records.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_trained_models.go b/esapi/api.xpack.ml.get_trained_models.go index f584692204..a73262bfdc 100644 --- a/esapi/api.xpack.ml.get_trained_models.go +++ b/esapi/api.xpack.ml.get_trained_models.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_trained_models_stats.go b/esapi/api.xpack.ml.get_trained_models_stats.go index 8c3de9267e..d506053ce6 100644 --- a/esapi/api.xpack.ml.get_trained_models_stats.go +++ b/esapi/api.xpack.ml.get_trained_models_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.infer_trained_model.go b/esapi/api.xpack.ml.infer_trained_model.go index e96b60d422..b9b94dd1bc 100644 --- a/esapi/api.xpack.ml.infer_trained_model.go +++ b/esapi/api.xpack.ml.infer_trained_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.info.go b/esapi/api.xpack.ml.info.go index 5da35baf25..ea32e94f78 100644 --- a/esapi/api.xpack.ml.info.go +++ b/esapi/api.xpack.ml.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.open_job.go b/esapi/api.xpack.ml.open_job.go index a7dfe23663..35b6d75e7f 100644 --- a/esapi/api.xpack.ml.open_job.go +++ b/esapi/api.xpack.ml.open_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.post_calendar_events.go b/esapi/api.xpack.ml.post_calendar_events.go index f78de1a613..725e8906b1 100644 --- a/esapi/api.xpack.ml.post_calendar_events.go +++ b/esapi/api.xpack.ml.post_calendar_events.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.post_data.go b/esapi/api.xpack.ml.post_data.go index 1ee00fe557..840c15dd0e 100644 --- a/esapi/api.xpack.ml.post_data.go +++ b/esapi/api.xpack.ml.post_data.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.preview_data_frame_analytics.go b/esapi/api.xpack.ml.preview_data_frame_analytics.go index a1a5334cd3..475cb00feb 100644 --- a/esapi/api.xpack.ml.preview_data_frame_analytics.go +++ b/esapi/api.xpack.ml.preview_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.preview_datafeed.go b/esapi/api.xpack.ml.preview_datafeed.go index 9926ad4d85..925f2d133e 100644 --- a/esapi/api.xpack.ml.preview_datafeed.go +++ b/esapi/api.xpack.ml.preview_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_calendar.go b/esapi/api.xpack.ml.put_calendar.go index 5f74bff437..0aae0d4289 100644 --- a/esapi/api.xpack.ml.put_calendar.go +++ b/esapi/api.xpack.ml.put_calendar.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_calendar_job.go b/esapi/api.xpack.ml.put_calendar_job.go index 8ea651648c..a396c30b03 100644 --- a/esapi/api.xpack.ml.put_calendar_job.go +++ b/esapi/api.xpack.ml.put_calendar_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_data_frame_analytics.go b/esapi/api.xpack.ml.put_data_frame_analytics.go index 26c0e19c12..1d837e154f 100644 --- a/esapi/api.xpack.ml.put_data_frame_analytics.go +++ b/esapi/api.xpack.ml.put_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_datafeed.go b/esapi/api.xpack.ml.put_datafeed.go index f130425d77..4b5a8d1e65 100644 --- a/esapi/api.xpack.ml.put_datafeed.go +++ b/esapi/api.xpack.ml.put_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_filter.go b/esapi/api.xpack.ml.put_filter.go index 9e2b0e3ff6..9209a584f9 100644 --- a/esapi/api.xpack.ml.put_filter.go +++ b/esapi/api.xpack.ml.put_filter.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_job.go b/esapi/api.xpack.ml.put_job.go index ff3193deec..944d7d310f 100644 --- a/esapi/api.xpack.ml.put_job.go +++ b/esapi/api.xpack.ml.put_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model.go b/esapi/api.xpack.ml.put_trained_model.go index b5c5890949..8cb4e5fdfd 100644 --- a/esapi/api.xpack.ml.put_trained_model.go +++ b/esapi/api.xpack.ml.put_trained_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model_alias.go b/esapi/api.xpack.ml.put_trained_model_alias.go index e3d2cd54d6..e1a1f7a00a 100644 --- a/esapi/api.xpack.ml.put_trained_model_alias.go +++ b/esapi/api.xpack.ml.put_trained_model_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model_definition_part.go b/esapi/api.xpack.ml.put_trained_model_definition_part.go index 9a99013850..608870098d 100644 --- a/esapi/api.xpack.ml.put_trained_model_definition_part.go +++ b/esapi/api.xpack.ml.put_trained_model_definition_part.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model_vocabulary.go b/esapi/api.xpack.ml.put_trained_model_vocabulary.go index 9cf69609bd..aadde2c5c2 100644 --- a/esapi/api.xpack.ml.put_trained_model_vocabulary.go +++ b/esapi/api.xpack.ml.put_trained_model_vocabulary.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.reset_job.go b/esapi/api.xpack.ml.reset_job.go index 0ca13a6de3..4458e35cf6 100644 --- a/esapi/api.xpack.ml.reset_job.go +++ b/esapi/api.xpack.ml.reset_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.revert_model_snapshot.go b/esapi/api.xpack.ml.revert_model_snapshot.go index 273565b34f..343944f0ed 100644 --- a/esapi/api.xpack.ml.revert_model_snapshot.go +++ b/esapi/api.xpack.ml.revert_model_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.set_upgrade_mode.go b/esapi/api.xpack.ml.set_upgrade_mode.go index feff58a26c..3cfb610e91 100644 --- a/esapi/api.xpack.ml.set_upgrade_mode.go +++ b/esapi/api.xpack.ml.set_upgrade_mode.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.start_data_frame_analytics.go b/esapi/api.xpack.ml.start_data_frame_analytics.go index a4b6458a52..1e6b6eac4f 100644 --- a/esapi/api.xpack.ml.start_data_frame_analytics.go +++ b/esapi/api.xpack.ml.start_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.start_datafeed.go b/esapi/api.xpack.ml.start_datafeed.go index 9b62119ceb..1da322db37 100644 --- a/esapi/api.xpack.ml.start_datafeed.go +++ b/esapi/api.xpack.ml.start_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.start_trained_model_deployment.go b/esapi/api.xpack.ml.start_trained_model_deployment.go index 6ff6639367..d395539eba 100644 --- a/esapi/api.xpack.ml.start_trained_model_deployment.go +++ b/esapi/api.xpack.ml.start_trained_model_deployment.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.stop_data_frame_analytics.go b/esapi/api.xpack.ml.stop_data_frame_analytics.go index b150427270..40815ffa67 100644 --- a/esapi/api.xpack.ml.stop_data_frame_analytics.go +++ b/esapi/api.xpack.ml.stop_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.stop_datafeed.go b/esapi/api.xpack.ml.stop_datafeed.go index f178a34c4a..63de96e2a6 100644 --- a/esapi/api.xpack.ml.stop_datafeed.go +++ b/esapi/api.xpack.ml.stop_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.stop_trained_model_deployment.go b/esapi/api.xpack.ml.stop_trained_model_deployment.go index 42a3808ef3..1bb2278581 100644 --- a/esapi/api.xpack.ml.stop_trained_model_deployment.go +++ b/esapi/api.xpack.ml.stop_trained_model_deployment.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_data_frame_analytics.go b/esapi/api.xpack.ml.update_data_frame_analytics.go index f5795c61e3..228239352e 100644 --- a/esapi/api.xpack.ml.update_data_frame_analytics.go +++ b/esapi/api.xpack.ml.update_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_datafeed.go b/esapi/api.xpack.ml.update_datafeed.go index 2f0efb5573..fa847fb4d2 100644 --- a/esapi/api.xpack.ml.update_datafeed.go +++ b/esapi/api.xpack.ml.update_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_filter.go b/esapi/api.xpack.ml.update_filter.go index e35fd196a9..54e5440487 100644 --- a/esapi/api.xpack.ml.update_filter.go +++ b/esapi/api.xpack.ml.update_filter.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_job.go b/esapi/api.xpack.ml.update_job.go index fbd935b3e6..6dad1669f0 100644 --- a/esapi/api.xpack.ml.update_job.go +++ b/esapi/api.xpack.ml.update_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_model_snapshot.go b/esapi/api.xpack.ml.update_model_snapshot.go index e428a7702d..f53560c0d5 100644 --- a/esapi/api.xpack.ml.update_model_snapshot.go +++ b/esapi/api.xpack.ml.update_model_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_trained_model_deployment.go b/esapi/api.xpack.ml.update_trained_model_deployment.go index 72d963fc30..fc51ea4340 100644 --- a/esapi/api.xpack.ml.update_trained_model_deployment.go +++ b/esapi/api.xpack.ml.update_trained_model_deployment.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.upgrade_job_snapshot.go b/esapi/api.xpack.ml.upgrade_job_snapshot.go index 718eb1e3e7..d143298b22 100644 --- a/esapi/api.xpack.ml.upgrade_job_snapshot.go +++ b/esapi/api.xpack.ml.upgrade_job_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.validate.go b/esapi/api.xpack.ml.validate.go index f4a4ec0438..7ecf3581f3 100644 --- a/esapi/api.xpack.ml.validate.go +++ b/esapi/api.xpack.ml.validate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.validate_detector.go b/esapi/api.xpack.ml.validate_detector.go index 7820849551..e00686a755 100644 --- a/esapi/api.xpack.ml.validate_detector.go +++ b/esapi/api.xpack.ml.validate_detector.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.monitoring.bulk.go b/esapi/api.xpack.monitoring.bulk.go index 3390ecf912..1b8a7a9161 100644 --- a/esapi/api.xpack.monitoring.bulk.go +++ b/esapi/api.xpack.monitoring.bulk.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.open_point_in_time.go b/esapi/api.xpack.open_point_in_time.go index 9913771cf9..4686175d83 100644 --- a/esapi/api.xpack.open_point_in_time.go +++ b/esapi/api.xpack.open_point_in_time.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.profiling.flamegraph.go b/esapi/api.xpack.profiling.flamegraph.go new file mode 100644 index 0000000000..65192cd823 --- /dev/null +++ b/esapi/api.xpack.profiling.flamegraph.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newProfilingFlamegraphFunc(t Transport) ProfilingFlamegraph { + return func(body io.Reader, o ...func(*ProfilingFlamegraphRequest)) (*Response, error) { + var r = ProfilingFlamegraphRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ProfilingFlamegraph - Extracts a UI-optimized structure to render flamegraphs from Universal Profiling. +// +// See full documentation at https://www.elastic.co/guide/en/observability/current/universal-profiling.html. 
+type ProfilingFlamegraph func(body io.Reader, o ...func(*ProfilingFlamegraphRequest)) (*Response, error) + +// ProfilingFlamegraphRequest configures the Profiling Flamegraph API request. +type ProfilingFlamegraphRequest struct { + Body io.Reader + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ProfilingFlamegraphRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.flamegraph") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_profiling/flamegraph")) + path.WriteString("http://") + path.WriteString("/_profiling/flamegraph") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.flamegraph") + if reader := instrument.RecordRequestBody(ctx, "profiling.flamegraph", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.flamegraph") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ProfilingFlamegraph) WithContext(v context.Context) func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ProfilingFlamegraph) WithPretty() func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ProfilingFlamegraph) WithHuman() func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. 
+func (f ProfilingFlamegraph) WithErrorTrace() func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ProfilingFlamegraph) WithFilterPath(v ...string) func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ProfilingFlamegraph) WithHeader(h map[string]string) func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ProfilingFlamegraph) WithOpaqueID(s string) func(*ProfilingFlamegraphRequest) { + return func(r *ProfilingFlamegraphRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.rollup.delete_job.go b/esapi/api.xpack.rollup.delete_job.go index c265ac9a05..3cbda6e7b7 100644 --- a/esapi/api.xpack.rollup.delete_job.go +++ b/esapi/api.xpack.rollup.delete_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.get_jobs.go b/esapi/api.xpack.rollup.get_jobs.go index 46baf8177d..58948907ad 100644 --- a/esapi/api.xpack.rollup.get_jobs.go +++ b/esapi/api.xpack.rollup.get_jobs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.get_rollup_caps.go b/esapi/api.xpack.rollup.get_rollup_caps.go index 5e3a01263a..47764467d3 100644 --- a/esapi/api.xpack.rollup.get_rollup_caps.go +++ b/esapi/api.xpack.rollup.get_rollup_caps.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.get_rollup_index_caps.go b/esapi/api.xpack.rollup.get_rollup_index_caps.go index a0d7dd12e9..c20ec1276b 100644 --- a/esapi/api.xpack.rollup.get_rollup_index_caps.go +++ b/esapi/api.xpack.rollup.get_rollup_index_caps.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.put_job.go b/esapi/api.xpack.rollup.put_job.go index c1e7a2e258..8e0bd10943 100644 --- a/esapi/api.xpack.rollup.put_job.go +++ b/esapi/api.xpack.rollup.put_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
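The new ProfilingFlamegraph endpoint generated above (POST /_profiling/flamegraph) follows the same pattern and can be called through its request struct. A minimal sketch, again not part of the generated code: the JSON body below is an assumed placeholder (the exact query schema is described in the Universal Profiling documentation), and it presupposes a cluster that has Universal Profiling data.

package main

import (
	"context"
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Placeholder request body: restrict the flamegraph to the last 15 minutes.
	body := strings.NewReader(`{"query": {"range": {"@timestamp": {"gte": "now-15m"}}}}`)

	res, err := esapi.ProfilingFlamegraphRequest{Body: body}.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("profiling.flamegraph failed: %s", err)
	}
	defer res.Body.Close()
	log.Println("profiling.flamegraph:", res.Status())
}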
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.rollup_search.go b/esapi/api.xpack.rollup.rollup_search.go index 6f2e8fec8a..b8503737c2 100644 --- a/esapi/api.xpack.rollup.rollup_search.go +++ b/esapi/api.xpack.rollup.rollup_search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.start_job.go b/esapi/api.xpack.rollup.start_job.go index dadb878111..794764e4ba 100644 --- a/esapi/api.xpack.rollup.start_job.go +++ b/esapi/api.xpack.rollup.start_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.stop_job.go b/esapi/api.xpack.rollup.stop_job.go index 2a06233cd2..46bbbe00b2 100644 --- a/esapi/api.xpack.rollup.stop_job.go +++ b/esapi/api.xpack.rollup.stop_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.cache_stats.go b/esapi/api.xpack.searchable_snapshots.cache_stats.go index 1660a2a058..3c2b550564 100644 --- a/esapi/api.xpack.searchable_snapshots.cache_stats.go +++ b/esapi/api.xpack.searchable_snapshots.cache_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.clear_cache.go b/esapi/api.xpack.searchable_snapshots.clear_cache.go index 0644725529..51d7a8f5b9 100644 --- a/esapi/api.xpack.searchable_snapshots.clear_cache.go +++ b/esapi/api.xpack.searchable_snapshots.clear_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.mount.go b/esapi/api.xpack.searchable_snapshots.mount.go index 28cff3e768..20fc6cffaf 100644 --- a/esapi/api.xpack.searchable_snapshots.mount.go +++ b/esapi/api.xpack.searchable_snapshots.mount.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.stats.go b/esapi/api.xpack.searchable_snapshots.stats.go index 944154ea0d..5a664764eb 100644 --- a/esapi/api.xpack.searchable_snapshots.stats.go +++ b/esapi/api.xpack.searchable_snapshots.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.activate_user_profile.go b/esapi/api.xpack.security.activate_user_profile.go index b4879481ed..e7f8976d8b 100644 --- a/esapi/api.xpack.security.activate_user_profile.go +++ b/esapi/api.xpack.security.activate_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.authenticate.go b/esapi/api.xpack.security.authenticate.go index 97cf419a03..08b89392f5 100644 --- a/esapi/api.xpack.security.authenticate.go +++ b/esapi/api.xpack.security.authenticate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.bulk_update_api_keys.go b/esapi/api.xpack.security.bulk_update_api_keys.go index ed03b9bbd4..16248fbd81 100644 --- a/esapi/api.xpack.security.bulk_update_api_keys.go +++ b/esapi/api.xpack.security.bulk_update_api_keys.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.change_password.go b/esapi/api.xpack.security.change_password.go index 827acda025..a8595f4615 100644 --- a/esapi/api.xpack.security.change_password.go +++ b/esapi/api.xpack.security.change_password.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_api_key_cache.go b/esapi/api.xpack.security.clear_api_key_cache.go index e67dd9c5e5..62e0c7eaef 100644 --- a/esapi/api.xpack.security.clear_api_key_cache.go +++ b/esapi/api.xpack.security.clear_api_key_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_privileges.go b/esapi/api.xpack.security.clear_cached_privileges.go index 0f86ff349d..9e167dc9d2 100644 --- a/esapi/api.xpack.security.clear_cached_privileges.go +++ b/esapi/api.xpack.security.clear_cached_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_realms.go b/esapi/api.xpack.security.clear_cached_realms.go index a92a29606e..c23acb9456 100644 --- a/esapi/api.xpack.security.clear_cached_realms.go +++ b/esapi/api.xpack.security.clear_cached_realms.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_roles.go b/esapi/api.xpack.security.clear_cached_roles.go index 4c236fdb80..415ff4b535 100644 --- a/esapi/api.xpack.security.clear_cached_roles.go +++ b/esapi/api.xpack.security.clear_cached_roles.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_service_tokens.go b/esapi/api.xpack.security.clear_cached_service_tokens.go index e32a0c1cdf..aedf4049e0 100644 --- a/esapi/api.xpack.security.clear_cached_service_tokens.go +++ b/esapi/api.xpack.security.clear_cached_service_tokens.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.create_api_key.go b/esapi/api.xpack.security.create_api_key.go index 5d3b98be0c..6fce608d95 100644 --- a/esapi/api.xpack.security.create_api_key.go +++ b/esapi/api.xpack.security.create_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.create_cross_cluster_api_key.go b/esapi/api.xpack.security.create_cross_cluster_api_key.go index 35cdb9e466..c01210e2f2 100644 --- a/esapi/api.xpack.security.create_cross_cluster_api_key.go +++ b/esapi/api.xpack.security.create_cross_cluster_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.create_service_token.go b/esapi/api.xpack.security.create_service_token.go index 5f5c876927..46dd57afa9 100644 --- a/esapi/api.xpack.security.create_service_token.go +++ b/esapi/api.xpack.security.create_service_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_privileges.go b/esapi/api.xpack.security.delete_privileges.go index a79452a127..40d73f0a64 100644 --- a/esapi/api.xpack.security.delete_privileges.go +++ b/esapi/api.xpack.security.delete_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_role.go b/esapi/api.xpack.security.delete_role.go index d91251d0f9..951d68c534 100644 --- a/esapi/api.xpack.security.delete_role.go +++ b/esapi/api.xpack.security.delete_role.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_role_mapping.go b/esapi/api.xpack.security.delete_role_mapping.go index 1c34caea0d..5461df38da 100644 --- a/esapi/api.xpack.security.delete_role_mapping.go +++ b/esapi/api.xpack.security.delete_role_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_service_token.go b/esapi/api.xpack.security.delete_service_token.go index 33ce1bc659..9b7ee6e079 100644 --- a/esapi/api.xpack.security.delete_service_token.go +++ b/esapi/api.xpack.security.delete_service_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_user.go b/esapi/api.xpack.security.delete_user.go index 02ebbdf8da..dd2478a6d0 100644 --- a/esapi/api.xpack.security.delete_user.go +++ b/esapi/api.xpack.security.delete_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.disable_user.go b/esapi/api.xpack.security.disable_user.go index 43de78f59d..93a443e592 100644 --- a/esapi/api.xpack.security.disable_user.go +++ b/esapi/api.xpack.security.disable_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.disable_user_profile.go b/esapi/api.xpack.security.disable_user_profile.go index 00964416f3..3689b7316c 100644 --- a/esapi/api.xpack.security.disable_user_profile.go +++ b/esapi/api.xpack.security.disable_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enable_user.go b/esapi/api.xpack.security.enable_user.go index a6dd2c79a3..cfd28d8af0 100644 --- a/esapi/api.xpack.security.enable_user.go +++ b/esapi/api.xpack.security.enable_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enable_user_profile.go b/esapi/api.xpack.security.enable_user_profile.go index e7ec866aaf..4f183a812e 100644 --- a/esapi/api.xpack.security.enable_user_profile.go +++ b/esapi/api.xpack.security.enable_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enroll_kibana.go b/esapi/api.xpack.security.enroll_kibana.go index 136aac3765..9a9559eef1 100644 --- a/esapi/api.xpack.security.enroll_kibana.go +++ b/esapi/api.xpack.security.enroll_kibana.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enroll_node.go b/esapi/api.xpack.security.enroll_node.go index baae57aa04..b3ccaa7833 100644 --- a/esapi/api.xpack.security.enroll_node.go +++ b/esapi/api.xpack.security.enroll_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_api_key.go b/esapi/api.xpack.security.get_api_key.go index 4822619e6b..64c7b0e4fa 100644 --- a/esapi/api.xpack.security.get_api_key.go +++ b/esapi/api.xpack.security.get_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_builtin_privileges.go b/esapi/api.xpack.security.get_builtin_privileges.go index e50c0094b9..150e358a2a 100644 --- a/esapi/api.xpack.security.get_builtin_privileges.go +++ b/esapi/api.xpack.security.get_builtin_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_privileges.go b/esapi/api.xpack.security.get_privileges.go index fe963f03a6..2166306581 100644 --- a/esapi/api.xpack.security.get_privileges.go +++ b/esapi/api.xpack.security.get_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_role.go b/esapi/api.xpack.security.get_role.go index ee55fce0bb..a99ab5d669 100644 --- a/esapi/api.xpack.security.get_role.go +++ b/esapi/api.xpack.security.get_role.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_role_mapping.go b/esapi/api.xpack.security.get_role_mapping.go index c41e722fb4..c0299e0e70 100644 --- a/esapi/api.xpack.security.get_role_mapping.go +++ b/esapi/api.xpack.security.get_role_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_service_accounts.go b/esapi/api.xpack.security.get_service_accounts.go index c3b29eb941..f8d6dd6b1e 100644 --- a/esapi/api.xpack.security.get_service_accounts.go +++ b/esapi/api.xpack.security.get_service_accounts.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_service_credentials.go b/esapi/api.xpack.security.get_service_credentials.go index 67bd6e2882..e44d76cef2 100644 --- a/esapi/api.xpack.security.get_service_credentials.go +++ b/esapi/api.xpack.security.get_service_credentials.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_settings.go b/esapi/api.xpack.security.get_settings.go index 36ade7bcde..305762deec 100644 --- a/esapi/api.xpack.security.get_settings.go +++ b/esapi/api.xpack.security.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_token.go b/esapi/api.xpack.security.get_token.go index 1fdbf2dd45..d04994024a 100644 --- a/esapi/api.xpack.security.get_token.go +++ b/esapi/api.xpack.security.get_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_user.go b/esapi/api.xpack.security.get_user.go index 719215435c..cad6440667 100644 --- a/esapi/api.xpack.security.get_user.go +++ b/esapi/api.xpack.security.get_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_user_privileges.go b/esapi/api.xpack.security.get_user_privileges.go index 5511de0f38..424eba52c3 100644 --- a/esapi/api.xpack.security.get_user_privileges.go +++ b/esapi/api.xpack.security.get_user_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_user_profile.go b/esapi/api.xpack.security.get_user_profile.go index 38e017ee60..9fdede7bb2 100644 --- a/esapi/api.xpack.security.get_user_profile.go +++ b/esapi/api.xpack.security.get_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.grant_api_key.go b/esapi/api.xpack.security.grant_api_key.go index e8ca7bb0e9..6e9994a760 100644 --- a/esapi/api.xpack.security.grant_api_key.go +++ b/esapi/api.xpack.security.grant_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.has_privileges.go b/esapi/api.xpack.security.has_privileges.go index ecb6977817..2d619da2be 100644 --- a/esapi/api.xpack.security.has_privileges.go +++ b/esapi/api.xpack.security.has_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.has_privileges_user_profile.go b/esapi/api.xpack.security.has_privileges_user_profile.go index 04614156e6..dee1e51d0a 100644 --- a/esapi/api.xpack.security.has_privileges_user_profile.go +++ b/esapi/api.xpack.security.has_privileges_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.invalidate_api_key.go b/esapi/api.xpack.security.invalidate_api_key.go index 6a790fdb04..b04072abe7 100644 --- a/esapi/api.xpack.security.invalidate_api_key.go +++ b/esapi/api.xpack.security.invalidate_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.invalidate_token.go b/esapi/api.xpack.security.invalidate_token.go index c0369ed9fd..5616273564 100644 --- a/esapi/api.xpack.security.invalidate_token.go +++ b/esapi/api.xpack.security.invalidate_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.oidc_authenticate.go b/esapi/api.xpack.security.oidc_authenticate.go index 7e91ceb8e0..1c740bdb5b 100644 --- a/esapi/api.xpack.security.oidc_authenticate.go +++ b/esapi/api.xpack.security.oidc_authenticate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.oidc_logout.go b/esapi/api.xpack.security.oidc_logout.go index 15fe0e565b..81fa6e28a1 100644 --- a/esapi/api.xpack.security.oidc_logout.go +++ b/esapi/api.xpack.security.oidc_logout.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.oidc_prepare_authentication.go b/esapi/api.xpack.security.oidc_prepare_authentication.go index f5f4ac2a52..ddec3997bc 100644 --- a/esapi/api.xpack.security.oidc_prepare_authentication.go +++ b/esapi/api.xpack.security.oidc_prepare_authentication.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_privileges.go b/esapi/api.xpack.security.put_privileges.go index 1ccea19507..2d955484c0 100644 --- a/esapi/api.xpack.security.put_privileges.go +++ b/esapi/api.xpack.security.put_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_role.go b/esapi/api.xpack.security.put_role.go index 5d4e516907..87ca84e05e 100644 --- a/esapi/api.xpack.security.put_role.go +++ b/esapi/api.xpack.security.put_role.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_role_mapping.go b/esapi/api.xpack.security.put_role_mapping.go index 0e8378bfcc..f9313cee47 100644 --- a/esapi/api.xpack.security.put_role_mapping.go +++ b/esapi/api.xpack.security.put_role_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_user.go b/esapi/api.xpack.security.put_user.go index 6b21ce4a66..90ae9013c9 100644 --- a/esapi/api.xpack.security.put_user.go +++ b/esapi/api.xpack.security.put_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.query_api_keys.go b/esapi/api.xpack.security.query_api_keys.go index 026c178be2..cd3da94c3f 100644 --- a/esapi/api.xpack.security.query_api_keys.go +++ b/esapi/api.xpack.security.query_api_keys.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.query_user.go b/esapi/api.xpack.security.query_user.go new file mode 100644 index 0000000000..99746bf7c1 --- /dev/null +++ b/esapi/api.xpack.security.query_user.go @@ -0,0 +1,246 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strconv" + "strings" +) + +func newSecurityQueryUserFunc(t Transport) SecurityQueryUser { + return func(o ...func(*SecurityQueryUserRequest)) (*Response, error) { + var r = SecurityQueryUserRequest{} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// SecurityQueryUser - Retrieves information for Users using a subset of query DSL +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-user.html. +type SecurityQueryUser func(o ...func(*SecurityQueryUserRequest)) (*Response, error) + +// SecurityQueryUserRequest configures the Security Query User API request. +type SecurityQueryUserRequest struct { + Body io.Reader + + WithProfileUID *bool + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r SecurityQueryUserRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "security.query_user") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_security/_query/user")) + path.WriteString("http://") + path.WriteString("/_security/_query/user") + + params = make(map[string]string) + + if r.WithProfileUID != nil { + params["with_profile_uid"] = strconv.FormatBool(*r.WithProfileUID) + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "security.query_user") + if reader := instrument.RecordRequestBody(ctx, "security.query_user", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "security.query_user") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f SecurityQueryUser) WithContext(v context.Context) func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.ctx = v + } +} + +// WithBody - From, size, query, sort and search_after. +func (f SecurityQueryUser) WithBody(v io.Reader) func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.Body = v + } +} + +// WithWithProfileUID - flag to retrieve profile uid (if exists) associated with the user. +func (f SecurityQueryUser) WithWithProfileUID(v bool) func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.WithProfileUID = &v + } +} + +// WithPretty makes the response body pretty-printed. +func (f SecurityQueryUser) WithPretty() func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f SecurityQueryUser) WithHuman() func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. 
+func (f SecurityQueryUser) WithErrorTrace() func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f SecurityQueryUser) WithFilterPath(v ...string) func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f SecurityQueryUser) WithHeader(h map[string]string) func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f SecurityQueryUser) WithOpaqueID(s string) func(*SecurityQueryUserRequest) { + return func(r *SecurityQueryUserRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.security.saml_authenticate.go b/esapi/api.xpack.security.saml_authenticate.go index 7a42cf9080..ba4d41fba5 100644 --- a/esapi/api.xpack.security.saml_authenticate.go +++ b/esapi/api.xpack.security.saml_authenticate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_complete_logout.go b/esapi/api.xpack.security.saml_complete_logout.go index 085dd22094..4c62869ffa 100644 --- a/esapi/api.xpack.security.saml_complete_logout.go +++ b/esapi/api.xpack.security.saml_complete_logout.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_invalidate.go b/esapi/api.xpack.security.saml_invalidate.go index 02c9ac803f..7941e86e8b 100644 --- a/esapi/api.xpack.security.saml_invalidate.go +++ b/esapi/api.xpack.security.saml_invalidate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_logout.go b/esapi/api.xpack.security.saml_logout.go index 2a301c5a6e..5f7b6e934d 100644 --- a/esapi/api.xpack.security.saml_logout.go +++ b/esapi/api.xpack.security.saml_logout.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_prepare_authentication.go b/esapi/api.xpack.security.saml_prepare_authentication.go index afe82c9e1b..3cb5e33751 100644 --- a/esapi/api.xpack.security.saml_prepare_authentication.go +++ b/esapi/api.xpack.security.saml_prepare_authentication.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
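The new Security Query User API above takes its search definition via WithBody (from, size, query, sort and search_after, per the generated documentation). A minimal sketch follows, assuming a configured *elasticsearch.Client named es with sufficient security privileges; the wildcard query and size values are illustrative only.

```go
package examples

import (
	"context"
	"fmt"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

// queryUsers sketches a call to the Security Query User endpoint. The query
// body is an illustrative example of the from/size/query/sort/search_after
// payload the endpoint accepts; adjust it to your own user names.
func queryUsers(ctx context.Context, es *elasticsearch.Client) error {
	res, err := es.Security.QueryUser(
		es.Security.QueryUser.WithContext(ctx),
		es.Security.QueryUser.WithBody(strings.NewReader(`{
		  "query": { "wildcard": { "username": "jan*" } },
		  "sort":  [ "username" ],
		  "size":  10
		}`)),
		// Also return the profile UID of matching users, when one exists.
		es.Security.QueryUser.WithWithProfileUID(true),
	)
	if err != nil {
		return err
	}
	defer res.Body.Close()

	fmt.Println(res.String())
	return nil
}
```

WithWithProfileUID(true) sets the with_profile_uid query parameter, so matching users are returned together with their profile UID if one is associated.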
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_service_provider_metadata.go b/esapi/api.xpack.security.saml_service_provider_metadata.go index 7165e08008..434b73ca79 100644 --- a/esapi/api.xpack.security.saml_service_provider_metadata.go +++ b/esapi/api.xpack.security.saml_service_provider_metadata.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.suggest_user_profiles.go b/esapi/api.xpack.security.suggest_user_profiles.go index d6a36212d4..59e065d557 100644 --- a/esapi/api.xpack.security.suggest_user_profiles.go +++ b/esapi/api.xpack.security.suggest_user_profiles.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_api_key.go b/esapi/api.xpack.security.update_api_key.go index 2a97dc5c02..175fb4b9d2 100644 --- a/esapi/api.xpack.security.update_api_key.go +++ b/esapi/api.xpack.security.update_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_cross_cluster_api_key.go b/esapi/api.xpack.security.update_cross_cluster_api_key.go index 83ce6b7c61..46f405f692 100644 --- a/esapi/api.xpack.security.update_cross_cluster_api_key.go +++ b/esapi/api.xpack.security.update_cross_cluster_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_settings.go b/esapi/api.xpack.security.update_settings.go index 799ac690c4..380fe76f2c 100644 --- a/esapi/api.xpack.security.update_settings.go +++ b/esapi/api.xpack.security.update_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_user_profile_data.go b/esapi/api.xpack.security.update_user_profile_data.go index 46eab4e6a7..4b387d5a11 100644 --- a/esapi/api.xpack.security.update_user_profile_data.go +++ b/esapi/api.xpack.security.update_user_profile_data.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.delete_lifecycle.go b/esapi/api.xpack.slm.delete_lifecycle.go index f9290829fd..4065375fa6 100644 --- a/esapi/api.xpack.slm.delete_lifecycle.go +++ b/esapi/api.xpack.slm.delete_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.execute_lifecycle.go b/esapi/api.xpack.slm.execute_lifecycle.go index 7c4a1aa5ae..8075665e5c 100644 --- a/esapi/api.xpack.slm.execute_lifecycle.go +++ b/esapi/api.xpack.slm.execute_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.execute_retention.go b/esapi/api.xpack.slm.execute_retention.go index b6fb31f3e7..f3c3cc3c43 100644 --- a/esapi/api.xpack.slm.execute_retention.go +++ b/esapi/api.xpack.slm.execute_retention.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.get_lifecycle.go b/esapi/api.xpack.slm.get_lifecycle.go index 8cae902bf0..c5352af2d1 100644 --- a/esapi/api.xpack.slm.get_lifecycle.go +++ b/esapi/api.xpack.slm.get_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.get_stats.go b/esapi/api.xpack.slm.get_stats.go index 8a327584f6..266b886170 100644 --- a/esapi/api.xpack.slm.get_stats.go +++ b/esapi/api.xpack.slm.get_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.get_status.go b/esapi/api.xpack.slm.get_status.go index f341e228eb..2bb8dab6fe 100644 --- a/esapi/api.xpack.slm.get_status.go +++ b/esapi/api.xpack.slm.get_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.put_lifecycle.go b/esapi/api.xpack.slm.put_lifecycle.go index ba58a7ab0c..d0af4bec3a 100644 --- a/esapi/api.xpack.slm.put_lifecycle.go +++ b/esapi/api.xpack.slm.put_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.start.go b/esapi/api.xpack.slm.start.go index c5b9f3e46e..0a96dcfa03 100644 --- a/esapi/api.xpack.slm.start.go +++ b/esapi/api.xpack.slm.start.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.stop.go b/esapi/api.xpack.slm.stop.go index ef43f8ea43..512e139da2 100644 --- a/esapi/api.xpack.slm.stop.go +++ b/esapi/api.xpack.slm.stop.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.clear_cursor.go b/esapi/api.xpack.sql.clear_cursor.go index 3c048e7a0e..647d8d47fd 100644 --- a/esapi/api.xpack.sql.clear_cursor.go +++ b/esapi/api.xpack.sql.clear_cursor.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.delete_async.go b/esapi/api.xpack.sql.delete_async.go index 9cfccd8883..81596101b7 100644 --- a/esapi/api.xpack.sql.delete_async.go +++ b/esapi/api.xpack.sql.delete_async.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.get_async.go b/esapi/api.xpack.sql.get_async.go index d7a5013791..7e8e14a7e8 100644 --- a/esapi/api.xpack.sql.get_async.go +++ b/esapi/api.xpack.sql.get_async.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.get_async_status.go b/esapi/api.xpack.sql.get_async_status.go index 81dafd9842..dc77e73f65 100644 --- a/esapi/api.xpack.sql.get_async_status.go +++ b/esapi/api.xpack.sql.get_async_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.query.go b/esapi/api.xpack.sql.query.go index cb4c34e2f2..f80ab0d02d 100644 --- a/esapi/api.xpack.sql.query.go +++ b/esapi/api.xpack.sql.query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.translate.go b/esapi/api.xpack.sql.translate.go index 290f9c8af7..72bd212ced 100644 --- a/esapi/api.xpack.sql.translate.go +++ b/esapi/api.xpack.sql.translate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ssl.certificates.go b/esapi/api.xpack.ssl.certificates.go index ccb543a3e5..852121348d 100644 --- a/esapi/api.xpack.ssl.certificates.go +++ b/esapi/api.xpack.ssl.certificates.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.text_structure.find_field_structure.go b/esapi/api.xpack.text_structure.find_field_structure.go new file mode 100644 index 0000000000..277e58a985 --- /dev/null +++ b/esapi/api.xpack.text_structure.find_field_structure.go @@ -0,0 +1,386 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strconv" + "strings" + "time" +) + +func newTextStructureFindFieldStructureFunc(t Transport) TextStructureFindFieldStructure { + return func(field string, index string, o ...func(*TextStructureFindFieldStructureRequest)) (*Response, error) { + var r = TextStructureFindFieldStructureRequest{Field: field, Index: index} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// TextStructureFindFieldStructure - Finds the structure of a text field in an index. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/find-field-structure.html. +type TextStructureFindFieldStructure func(field string, index string, o ...func(*TextStructureFindFieldStructureRequest)) (*Response, error) + +// TextStructureFindFieldStructureRequest configures the Text Structure Find Field Structure API request. +type TextStructureFindFieldStructureRequest struct { + ColumnNames []string + Delimiter string + DocumentsToSample *int + EcsCompatibility string + Explain *bool + Field string + Format string + GrokPattern string + Index string + Quote string + ShouldTrimFields *bool + Timeout time.Duration + TimestampField string + TimestampFormat string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r TextStructureFindFieldStructureRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "text_structure.find_field_structure") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + path.Grow(7 + len("/_text_structure/find_field_structure")) + path.WriteString("http://") + path.WriteString("/_text_structure/find_field_structure") + + params = make(map[string]string) + + if len(r.ColumnNames) > 0 { + params["column_names"] = strings.Join(r.ColumnNames, ",") + } + + if r.Delimiter != "" { + params["delimiter"] = r.Delimiter + } + + if r.DocumentsToSample != nil { + params["documents_to_sample"] = strconv.FormatInt(int64(*r.DocumentsToSample), 10) + } + + if r.EcsCompatibility != "" { + params["ecs_compatibility"] = r.EcsCompatibility + } + + if r.Explain != nil { + params["explain"] = strconv.FormatBool(*r.Explain) + } + + if r.Field != "" { + params["field"] = r.Field + } + + if r.Format != "" { + params["format"] = r.Format + } + + if r.GrokPattern != "" { + params["grok_pattern"] = r.GrokPattern + } + + if r.Index != "" { + params["index"] = r.Index + } + + if r.Quote != "" { + params["quote"] = r.Quote + } + + if r.ShouldTrimFields != nil { + params["should_trim_fields"] = strconv.FormatBool(*r.ShouldTrimFields) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + + if r.TimestampField != "" { + params["timestamp_field"] = r.TimestampField + } + + if r.TimestampFormat != "" { + params["timestamp_format"] = r.TimestampFormat + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "text_structure.find_field_structure") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "text_structure.find_field_structure") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f TextStructureFindFieldStructure) WithContext(v context.Context) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.ctx = v + } +} + +// WithColumnNames - optional parameter containing a comma separated list of the column names for a delimited file. 
+func (f TextStructureFindFieldStructure) WithColumnNames(v ...string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.ColumnNames = v + } +} + +// WithDelimiter - optional parameter to specify the delimiter character for a delimited file - must be a single character. +func (f TextStructureFindFieldStructure) WithDelimiter(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Delimiter = v + } +} + +// WithDocumentsToSample - how many documents should be included in the analysis. +func (f TextStructureFindFieldStructure) WithDocumentsToSample(v int) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.DocumentsToSample = &v + } +} + +// WithEcsCompatibility - optional parameter to specify the compatibility mode with ecs grok patterns - may be either 'v1' or 'disabled'. +func (f TextStructureFindFieldStructure) WithEcsCompatibility(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.EcsCompatibility = v + } +} + +// WithExplain - whether to include a commentary on how the structure was derived. +func (f TextStructureFindFieldStructure) WithExplain(v bool) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Explain = &v + } +} + +// WithField - the field that should be analyzed. +func (f TextStructureFindFieldStructure) WithField(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Field = v + } +} + +// WithFormat - optional parameter to specify the high level file format. +func (f TextStructureFindFieldStructure) WithFormat(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Format = v + } +} + +// WithGrokPattern - optional parameter to specify the grok pattern that should be used to extract fields from messages in a semi-structured text file. +func (f TextStructureFindFieldStructure) WithGrokPattern(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.GrokPattern = v + } +} + +// WithIndex - the index containing the analyzed field. +func (f TextStructureFindFieldStructure) WithIndex(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Index = v + } +} + +// WithQuote - optional parameter to specify the quote character for a delimited file - must be a single character. +func (f TextStructureFindFieldStructure) WithQuote(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Quote = v + } +} + +// WithShouldTrimFields - optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them. +func (f TextStructureFindFieldStructure) WithShouldTrimFields(v bool) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.ShouldTrimFields = &v + } +} + +// WithTimeout - timeout after which the analysis will be aborted. 
+func (f TextStructureFindFieldStructure) WithTimeout(v time.Duration) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Timeout = v + } +} + +// WithTimestampField - optional parameter to specify the timestamp field in the file. +func (f TextStructureFindFieldStructure) WithTimestampField(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.TimestampField = v + } +} + +// WithTimestampFormat - optional parameter to specify the timestamp format in the file - may be either a joda or java time format. +func (f TextStructureFindFieldStructure) WithTimestampFormat(v string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.TimestampFormat = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f TextStructureFindFieldStructure) WithPretty() func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f TextStructureFindFieldStructure) WithHuman() func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f TextStructureFindFieldStructure) WithErrorTrace() func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f TextStructureFindFieldStructure) WithFilterPath(v ...string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f TextStructureFindFieldStructure) WithHeader(h map[string]string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f TextStructureFindFieldStructure) WithOpaqueID(s string) func(*TextStructureFindFieldStructureRequest) { + return func(r *TextStructureFindFieldStructureRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.text_structure.find_message_structure.go b/esapi/api.xpack.text_structure.find_message_structure.go new file mode 100644 index 0000000000..397c29aad4 --- /dev/null +++ b/esapi/api.xpack.text_structure.find_message_structure.go @@ -0,0 +1,360 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strconv" + "strings" + "time" +) + +func newTextStructureFindMessageStructureFunc(t Transport) TextStructureFindMessageStructure { + return func(body io.Reader, o ...func(*TextStructureFindMessageStructureRequest)) (*Response, error) { + var r = TextStructureFindMessageStructureRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// TextStructureFindMessageStructure - Finds the structure of a list of messages. The messages must contain data that is suitable to be ingested into Elasticsearch. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/find-message-structure.html. +type TextStructureFindMessageStructure func(body io.Reader, o ...func(*TextStructureFindMessageStructureRequest)) (*Response, error) + +// TextStructureFindMessageStructureRequest configures the Text Structure Find Message Structure API request. +type TextStructureFindMessageStructureRequest struct { + Body io.Reader + + ColumnNames []string + Delimiter string + EcsCompatibility string + Explain *bool + Format string + GrokPattern string + Quote string + ShouldTrimFields *bool + Timeout time.Duration + TimestampField string + TimestampFormat string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r TextStructureFindMessageStructureRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "text_structure.find_message_structure") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_text_structure/find_message_structure")) + path.WriteString("http://") + path.WriteString("/_text_structure/find_message_structure") + + params = make(map[string]string) + + if len(r.ColumnNames) > 0 { + params["column_names"] = strings.Join(r.ColumnNames, ",") + } + + if r.Delimiter != "" { + params["delimiter"] = r.Delimiter + } + + if r.EcsCompatibility != "" { + params["ecs_compatibility"] = r.EcsCompatibility + } + + if r.Explain != nil { + params["explain"] = strconv.FormatBool(*r.Explain) + } + + if r.Format != "" { + params["format"] = r.Format + } + + if r.GrokPattern != "" { + params["grok_pattern"] = r.GrokPattern + } + + if r.Quote != "" { + params["quote"] = r.Quote + } + + if r.ShouldTrimFields != nil { + params["should_trim_fields"] = strconv.FormatBool(*r.ShouldTrimFields) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + + if r.TimestampField != "" { + params["timestamp_field"] = r.TimestampField + } + + if r.TimestampFormat != "" { + params["timestamp_format"] = r.TimestampFormat + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "text_structure.find_message_structure") + if reader := instrument.RecordRequestBody(ctx, "text_structure.find_message_structure", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "text_structure.find_message_structure") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. 
+func (f TextStructureFindMessageStructure) WithContext(v context.Context) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.ctx = v + } +} + +// WithColumnNames - optional parameter containing a comma separated list of the column names for a delimited file. +func (f TextStructureFindMessageStructure) WithColumnNames(v ...string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.ColumnNames = v + } +} + +// WithDelimiter - optional parameter to specify the delimiter character for a delimited file - must be a single character. +func (f TextStructureFindMessageStructure) WithDelimiter(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Delimiter = v + } +} + +// WithEcsCompatibility - optional parameter to specify the compatibility mode with ecs grok patterns - may be either 'v1' or 'disabled'. +func (f TextStructureFindMessageStructure) WithEcsCompatibility(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.EcsCompatibility = v + } +} + +// WithExplain - whether to include a commentary on how the structure was derived. +func (f TextStructureFindMessageStructure) WithExplain(v bool) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Explain = &v + } +} + +// WithFormat - optional parameter to specify the high level file format. +func (f TextStructureFindMessageStructure) WithFormat(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Format = v + } +} + +// WithGrokPattern - optional parameter to specify the grok pattern that should be used to extract fields from messages in a semi-structured text file. +func (f TextStructureFindMessageStructure) WithGrokPattern(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.GrokPattern = v + } +} + +// WithQuote - optional parameter to specify the quote character for a delimited file - must be a single character. +func (f TextStructureFindMessageStructure) WithQuote(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Quote = v + } +} + +// WithShouldTrimFields - optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them. +func (f TextStructureFindMessageStructure) WithShouldTrimFields(v bool) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.ShouldTrimFields = &v + } +} + +// WithTimeout - timeout after which the analysis will be aborted. +func (f TextStructureFindMessageStructure) WithTimeout(v time.Duration) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Timeout = v + } +} + +// WithTimestampField - optional parameter to specify the timestamp field in the file. +func (f TextStructureFindMessageStructure) WithTimestampField(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.TimestampField = v + } +} + +// WithTimestampFormat - optional parameter to specify the timestamp format in the file - may be either a joda or java time format. 
+func (f TextStructureFindMessageStructure) WithTimestampFormat(v string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.TimestampFormat = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f TextStructureFindMessageStructure) WithPretty() func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f TextStructureFindMessageStructure) WithHuman() func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f TextStructureFindMessageStructure) WithErrorTrace() func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f TextStructureFindMessageStructure) WithFilterPath(v ...string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f TextStructureFindMessageStructure) WithHeader(h map[string]string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f TextStructureFindMessageStructure) WithOpaqueID(s string) func(*TextStructureFindMessageStructureRequest) { + return func(r *TextStructureFindMessageStructureRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.text_structure.find_structure.go b/esapi/api.xpack.text_structure.find_structure.go index 1b06f989c9..9862bb680f 100644 --- a/esapi/api.xpack.text_structure.find_structure.go +++ b/esapi/api.xpack.text_structure.find_structure.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.text_structure.test_grok_pattern.go b/esapi/api.xpack.text_structure.test_grok_pattern.go new file mode 100644 index 0000000000..93b2ce78db --- /dev/null +++ b/esapi/api.xpack.text_structure.test_grok_pattern.go @@ -0,0 +1,238 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
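The new test_grok_pattern file continues below; first, a rough usage sketch for the find_message_structure endpoint just defined. It assumes a client from elasticsearch.NewDefaultClient and a body with a top-level "messages" array as described on the find-message-structure reference page; adjust both to your setup.

```go
package main

import (
	"log"
	"strings"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Assumes ELASTICSEARCH_URL or a local cluster on :9200.
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("client: %s", err)
	}

	// Body shape assumed from the find-message-structure docs: a "messages" array.
	body := strings.NewReader(`{"messages":["[2024-03-05T10:52:36] ERROR disk full","[2024-03-05T10:52:41] INFO retrying"]}`)

	res, err := es.TextStructureFindMessageStructure(
		body,
		es.TextStructureFindMessageStructure.WithEcsCompatibility("v1"),
		es.TextStructureFindMessageStructure.WithTimeout(30*time.Second),
		es.TextStructureFindMessageStructure.WithPretty(),
	)
	if err != nil {
		log.Fatalf("request: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}
```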
+// +// Code generated from specification version 8.14.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newTextStructureTestGrokPatternFunc(t Transport) TextStructureTestGrokPattern { + return func(body io.Reader, o ...func(*TextStructureTestGrokPatternRequest)) (*Response, error) { + var r = TextStructureTestGrokPatternRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// TextStructureTestGrokPattern - Tests a Grok pattern on some text. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/test-grok-pattern.html. +type TextStructureTestGrokPattern func(body io.Reader, o ...func(*TextStructureTestGrokPatternRequest)) (*Response, error) + +// TextStructureTestGrokPatternRequest configures the Text Structure Test Grok Pattern API request. +type TextStructureTestGrokPatternRequest struct { + Body io.Reader + + EcsCompatibility string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r TextStructureTestGrokPatternRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "text_structure.test_grok_pattern") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_text_structure/test_grok_pattern")) + path.WriteString("http://") + path.WriteString("/_text_structure/test_grok_pattern") + + params = make(map[string]string) + + if r.EcsCompatibility != "" { + params["ecs_compatibility"] = r.EcsCompatibility + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "text_structure.test_grok_pattern") + if reader := instrument.RecordRequestBody(ctx, "text_structure.test_grok_pattern", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "text_structure.test_grok_pattern") + } + if err != nil { + if instrument, ok := 
r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f TextStructureTestGrokPattern) WithContext(v context.Context) func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + r.ctx = v + } +} + +// WithEcsCompatibility - optional parameter to specify the compatibility mode with ecs grok patterns - may be either 'v1' or 'disabled'. +func (f TextStructureTestGrokPattern) WithEcsCompatibility(v string) func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + r.EcsCompatibility = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f TextStructureTestGrokPattern) WithPretty() func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f TextStructureTestGrokPattern) WithHuman() func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f TextStructureTestGrokPattern) WithErrorTrace() func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f TextStructureTestGrokPattern) WithFilterPath(v ...string) func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f TextStructureTestGrokPattern) WithHeader(h map[string]string) func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f TextStructureTestGrokPattern) WithOpaqueID(s string) func(*TextStructureTestGrokPatternRequest) { + return func(r *TextStructureTestGrokPatternRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.transform.delete_transform.go b/esapi/api.xpack.transform.delete_transform.go index 91d73651e2..8e6d94a484 100644 --- a/esapi/api.xpack.transform.delete_transform.go +++ b/esapi/api.xpack.transform.delete_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.get_transform.go b/esapi/api.xpack.transform.get_transform.go index 28f89d5b60..58c370390f 100644 --- a/esapi/api.xpack.transform.get_transform.go +++ b/esapi/api.xpack.transform.get_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
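A matching sketch for the test_grok_pattern endpoint added above, before the run of version-bump hunks that follows. The body field names (grok_pattern, text) follow the test-grok-pattern reference page and are worth verifying against the target Elasticsearch version; the client setup is assumed as in the previous example.

```go
package main

import (
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("client: %s", err)
	}

	// Body shape assumed from the test-grok-pattern docs: a pattern plus sample lines.
	body := strings.NewReader(`{
	  "grok_pattern": "%{TIMESTAMP_ISO8601:ts} %{LOGLEVEL:level} %{GREEDYDATA:msg}",
	  "text": ["2024-03-05T10:52:36 ERROR disk full"]
	}`)

	res, err := es.TextStructureTestGrokPattern(
		body,
		es.TextStructureTestGrokPattern.WithEcsCompatibility("v1"),
	)
	if err != nil {
		log.Fatalf("request: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}
```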
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.get_transform_stats.go b/esapi/api.xpack.transform.get_transform_stats.go index 1ba3a49688..179d59180e 100644 --- a/esapi/api.xpack.transform.get_transform_stats.go +++ b/esapi/api.xpack.transform.get_transform_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.preview_transform.go b/esapi/api.xpack.transform.preview_transform.go index 2a6a11cd87..5e48c742cb 100644 --- a/esapi/api.xpack.transform.preview_transform.go +++ b/esapi/api.xpack.transform.preview_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.put_transform.go b/esapi/api.xpack.transform.put_transform.go index dd51b142ee..cff70888b8 100644 --- a/esapi/api.xpack.transform.put_transform.go +++ b/esapi/api.xpack.transform.put_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.reset_transform.go b/esapi/api.xpack.transform.reset_transform.go index d97eb1ea3f..43278dc9bc 100644 --- a/esapi/api.xpack.transform.reset_transform.go +++ b/esapi/api.xpack.transform.reset_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.schedule_now_transform.go b/esapi/api.xpack.transform.schedule_now_transform.go index 3c3679edfc..4031fc5c5e 100644 --- a/esapi/api.xpack.transform.schedule_now_transform.go +++ b/esapi/api.xpack.transform.schedule_now_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.start_transform.go b/esapi/api.xpack.transform.start_transform.go index 68ce78a35a..6419026f6c 100644 --- a/esapi/api.xpack.transform.start_transform.go +++ b/esapi/api.xpack.transform.start_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.stop_transform.go b/esapi/api.xpack.transform.stop_transform.go index 26a77ae771..24c4097914 100644 --- a/esapi/api.xpack.transform.stop_transform.go +++ b/esapi/api.xpack.transform.stop_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.update_transform.go b/esapi/api.xpack.transform.update_transform.go index c6a1a4c49e..9e40e9f5c7 100644 --- a/esapi/api.xpack.transform.update_transform.go +++ b/esapi/api.xpack.transform.update_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.upgrade_transforms.go b/esapi/api.xpack.transform.upgrade_transforms.go index 7d10c0dbe4..0e89ce10a5 100644 --- a/esapi/api.xpack.transform.upgrade_transforms.go +++ b/esapi/api.xpack.transform.upgrade_transforms.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.ack_watch.go b/esapi/api.xpack.watcher.ack_watch.go index 7ba9d9d231..94b723f4c6 100644 --- a/esapi/api.xpack.watcher.ack_watch.go +++ b/esapi/api.xpack.watcher.ack_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.activate_watch.go b/esapi/api.xpack.watcher.activate_watch.go index 668baa3894..e6cfa58c65 100644 --- a/esapi/api.xpack.watcher.activate_watch.go +++ b/esapi/api.xpack.watcher.activate_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.deactivate_watch.go b/esapi/api.xpack.watcher.deactivate_watch.go index 1584ebdd55..217992055d 100644 --- a/esapi/api.xpack.watcher.deactivate_watch.go +++ b/esapi/api.xpack.watcher.deactivate_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.delete_watch.go b/esapi/api.xpack.watcher.delete_watch.go index d60782cff2..9194f80d80 100644 --- a/esapi/api.xpack.watcher.delete_watch.go +++ b/esapi/api.xpack.watcher.delete_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.execute_watch.go b/esapi/api.xpack.watcher.execute_watch.go index 7bf90ed55d..afddb02848 100644 --- a/esapi/api.xpack.watcher.execute_watch.go +++ b/esapi/api.xpack.watcher.execute_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.get_settings.go b/esapi/api.xpack.watcher.get_settings.go index 1f98915210..3a55b46d1e 100644 --- a/esapi/api.xpack.watcher.get_settings.go +++ b/esapi/api.xpack.watcher.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.get_watch.go b/esapi/api.xpack.watcher.get_watch.go index 86d27a25b3..b831ac6d2d 100644 --- a/esapi/api.xpack.watcher.get_watch.go +++ b/esapi/api.xpack.watcher.get_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.put_watch.go b/esapi/api.xpack.watcher.put_watch.go index 44200a7235..b5af1bebae 100644 --- a/esapi/api.xpack.watcher.put_watch.go +++ b/esapi/api.xpack.watcher.put_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.query_watches.go b/esapi/api.xpack.watcher.query_watches.go index 32cd3e9546..18d069d092 100644 --- a/esapi/api.xpack.watcher.query_watches.go +++ b/esapi/api.xpack.watcher.query_watches.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.start.go b/esapi/api.xpack.watcher.start.go index fc3becf593..8bc9025e6c 100644 --- a/esapi/api.xpack.watcher.start.go +++ b/esapi/api.xpack.watcher.start.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.stats.go b/esapi/api.xpack.watcher.stats.go index f17e4f77dd..86d60e2b83 100644 --- a/esapi/api.xpack.watcher.stats.go +++ b/esapi/api.xpack.watcher.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.stop.go b/esapi/api.xpack.watcher.stop.go index 10c878e7c9..8db72d3e27 100644 --- a/esapi/api.xpack.watcher.stop.go +++ b/esapi/api.xpack.watcher.stop.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.update_settings.go b/esapi/api.xpack.watcher.update_settings.go index ee48817ed2..824473b910 100644 --- a/esapi/api.xpack.watcher.update_settings.go +++ b/esapi/api.xpack.watcher.update_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.xpack.info.go b/esapi/api.xpack.xpack.info.go index 489bec4742..cdabb4b93a 100644 --- a/esapi/api.xpack.xpack.info.go +++ b/esapi/api.xpack.xpack.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.xpack.usage.go b/esapi/api.xpack.xpack.usage.go index da299c36a2..5f39648cb8 100644 --- a/esapi/api.xpack.xpack.usage.go +++ b/esapi/api.xpack.xpack.usage.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.12.0: DO NOT EDIT +// Code generated from specification version 8.14.0: DO NOT EDIT package esapi diff --git a/internal/build/cmd/generate/commands/gentests/command.go b/internal/build/cmd/generate/commands/gentests/command.go index d1d7c2e292..80988a7616 100644 --- a/internal/build/cmd/generate/commands/gentests/command.go +++ b/internal/build/cmd/generate/commands/gentests/command.go @@ -26,6 +26,7 @@ import ( "io" "os" "path/filepath" + "regexp" "strings" "time" @@ -89,7 +90,6 @@ var gentestsCmd = &cobra.Command{ } // Command represents the "gentests" command. -// type Command struct { Input string Output string @@ -100,7 +100,6 @@ type Command struct { } // Execute runs the command. -// func (cmd *Command) Execute() error { if len(apiRegistry) < 1 { return fmt.Errorf("API registry in 'api_registry.gen.go' is empty: Did you run go generate?") @@ -150,7 +149,8 @@ func (cmd *Command) Execute() error { } for _, skipFile := range skipFiles { - if strings.HasSuffix(fpath, skipFile) { + r, _ := regexp.Compile(skipFile) + if strings.HasSuffix(fpath, skipFile) || r.MatchString(fpath) { if utils.IsTTY() { fmt.Fprint(os.Stderr, "\x1b[2m") } @@ -163,6 +163,7 @@ func (cmd *Command) Execute() error { fmt.Fprint(os.Stderr, "\x1b[0m") } skip = true + break } } diff --git a/internal/build/cmd/generate/commands/gentests/generator.go b/internal/build/cmd/generate/commands/gentests/generator.go index 3828b7a3c3..2f70c9a2a4 100644 --- a/internal/build/cmd/generate/commands/gentests/generator.go +++ b/internal/build/cmd/generate/commands/gentests/generator.go @@ -44,7 +44,6 @@ func init() { } // Generator represents the "gentests" generator. -// type Generator struct { b bytes.Buffer @@ -52,7 +51,6 @@ type Generator struct { } // Output returns the generator output. -// func (g *Generator) Output() (io.Reader, error) { name := g.TestSuite.Name() if g.TestSuite.Type == "xpack" { @@ -121,7 +119,6 @@ func (g *Generator) Output() (io.Reader, error) { } // OutputFormatted returns a formatted generator output. 
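The command.go hunk above lets each skipFiles entry double as a regular expression (and breaks out of the loop on the first hit), which is what allows the new `.*esql\/.*.yml` entry added to skips.go further below to skip an entire directory of tests rather than a single file. A small isolated sketch of that matching logic, with the compile error checked instead of discarded:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	skipFiles := []string{
		"ml/3rd_party_deployment.yml", // plain suffix match
		".*esql\\/.*.yml",             // regexp match, new with this change
	}

	for _, fpath := range []string{
		"rest-api-spec/test/free/esql/30_types.yml",
		"rest-api-spec/test/free/ml/3rd_party_deployment.yml",
		"rest-api-spec/test/free/search/10_basic.yml",
	} {
		skipped := false
		for _, skipFile := range skipFiles {
			r, err := regexp.Compile(skipFile)
			if strings.HasSuffix(fpath, skipFile) || (err == nil && r.MatchString(fpath)) {
				skipped = true
				break // first match wins, mirroring the added break
			}
		}
		fmt.Printf("%-55s skipped=%v\n", fpath, skipped)
	}
}
```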
-// func (g *Generator) OutputFormatted() (io.Reader, error) { out, err := g.Output() if err != nil { @@ -306,7 +303,6 @@ _ = recoverPanic } // Reference: https://github.com/elastic/elasticsearch/blob/master/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java -// func (g *Generator) genCommonSetup() { g.w(` // ----- Common Setup ------------------------------------------------------------- @@ -488,7 +484,6 @@ func (g *Generator) genCommonSetup() { // Reference: https://github.com/elastic/elasticsearch/blob/master/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java // Reference: https://github.com/elastic/elasticsearch/blob/master/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java -// func (g *Generator) genXPackSetup() { g.w(` // ----- XPack Setup ------------------------------------------------------------- @@ -1307,6 +1302,17 @@ func (g *Generator) genAction(a Action, skipBody ...bool) { } g.w("\t\tstrings.NewReader(`" + fmt.Sprintf("%s", j) + "`)") } + case "*bool": + switch v.(type) { + case []interface{}: + var vvv string + for _, vv := range v.([]interface{}) { + vvv = fmt.Sprintf("%v", vv) + } + g.w(fmt.Sprintf("&[]bool{%s}[0]", vvv)) + default: + panic(fmt.Sprintf("<%s> %s{}.%s: unexpected value <%T> %#v", typ, a.Request(), k, v, v)) + } } g.w(",\n") diff --git a/internal/build/cmd/generate/commands/gentests/skips.go b/internal/build/cmd/generate/commands/gentests/skips.go index f620e780f7..8e5c16f43f 100644 --- a/internal/build/cmd/generate/commands/gentests/skips.go +++ b/internal/build/cmd/generate/commands/gentests/skips.go @@ -61,6 +61,7 @@ var skipFiles = []string{ "ml/3rd_party_deployment.yml", // incompatible ml tests "dlm/10_usage.yml", // incompatible float expansion "api_key/60_admin_user.yml", + ".*esql\\/.*.yml", } // TODO: Comments into descriptions for `Skip()` @@ -170,6 +171,8 @@ indices.put_mapping/10_basic.yml: test/indices.put_template/10_basic.yml: # Incompatible regex +cat.indices/10_basic.yml: + - Test cat indices output for closed index (pre 7.2.0) cat.templates/10_basic.yml: - "Sort templates" - "Multiple template" @@ -203,6 +206,9 @@ tsdb/80_index_resize.yml: tsdb/40_search.yml: - aggregate a metric +tsdb/70_dimension_types.yml: + - flattened field missing routing path field + # Deliberate wrong type doesn't match Go types cluster.desired_nodes/10_basic.yml: - Test version must be a number @@ -230,6 +236,8 @@ api_key/10_basic.yml: - Test invalidate api keys api_key/11_invalidation.yml: - Test invalidate api key by username +api_key/21_query_with_aggs.yml: + - Test composite aggs api key rollup/put_job.yml: - Test put job with templates @@ -327,6 +335,9 @@ ml/semantic_search.yml: # model is not deployed to any node ml/text_expansion_search.yml: ml/text_expansion_search_sparse_vector.yml: +ml/search_knn_query_vector_builder.yml: +ml/text_embedding_search.yml: +ml/text_expansion_search_rank_features.yml: # TEMPORARY: Missing 'body: { indices: "test_index" }' payload, TODO: PR snapshot/10_basic.yml: @@ -336,6 +347,9 @@ snapshot/10_basic.yml: users/10_basic.yml: - Test put user with password hash +users/40_query.yml: + - Test query user + # Slash in index name is not escaped (BUG) security/authz/13_index_datemath.yml: - Test indexing documents with datemath, when permitted @@ -512,4 +526,6 @@ esql/40_unsupported_types.yml: esql/50_index_patterns.yml: - disjoint_mappings + + ` diff --git a/internal/build/go.mod b/internal/build/go.mod index 
03648be440..cfb75d493f 100644 --- a/internal/build/go.mod +++ b/internal/build/go.mod @@ -9,7 +9,7 @@ require ( github.com/elastic/go-elasticsearch/v8 v8.0.0-20210817150010-57d659deaca7 github.com/spf13/cobra v1.8.0 golang.org/x/crypto v0.19.0 - golang.org/x/tools v0.18.0 + golang.org/x/tools v0.19.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -17,7 +17,7 @@ require ( github.com/dlclark/regexp2 v1.4.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - golang.org/x/mod v0.15.0 // indirect - golang.org/x/sys v0.17.0 // indirect + golang.org/x/mod v0.16.0 // indirect + golang.org/x/sys v0.18.0 // indirect golang.org/x/term v0.17.0 // indirect ) diff --git a/internal/build/go.sum b/internal/build/go.sum index bb89ec413a..1964cd18a6 100644 --- a/internal/build/go.sum +++ b/internal/build/go.sum @@ -22,14 +22,20 @@ golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic= +golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ= golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= +golang.org/x/tools v0.19.0 h1:tfGCXNR1OsFG+sVdLAitlpjAvD/I6dHDKnYrpEZUHkw= +golang.org/x/tools v0.19.0/go.mod h1:qoJWxmGSIBmAeriMx19ogtrEPrGtDbPK634QFIcLAhc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= diff --git a/typedapi/api._.go b/typedapi/api._.go index 3039097231..c2dbb50059 100644 --- a/typedapi/api._.go +++ b/typedapi/api._.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package typedapi @@ -206,6 +206,7 @@ import ( indices_recovery "github.com/elastic/go-elasticsearch/v8/typedapi/indices/recovery" indices_refresh "github.com/elastic/go-elasticsearch/v8/typedapi/indices/refresh" indices_reload_search_analyzers "github.com/elastic/go-elasticsearch/v8/typedapi/indices/reloadsearchanalyzers" + indices_resolve_cluster "github.com/elastic/go-elasticsearch/v8/typedapi/indices/resolvecluster" indices_resolve_index "github.com/elastic/go-elasticsearch/v8/typedapi/indices/resolveindex" indices_rollover "github.com/elastic/go-elasticsearch/v8/typedapi/indices/rollover" indices_segments "github.com/elastic/go-elasticsearch/v8/typedapi/indices/segments" @@ -1133,6 +1134,10 @@ type Indices struct { // Reloads an index's search analyzers and their resources. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-reload-analyzers.html ReloadSearchAnalyzers indices_reload_search_analyzers.NewReloadSearchAnalyzers + // Resolves the specified index expressions to return information about each + // cluster, including the local cluster, if included. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-cluster-api.html + ResolveCluster indices_resolve_cluster.NewResolveCluster // Returns information about any matching indices, aliases, and data streams // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-index-api.html ResolveIndex indices_resolve_index.NewResolveIndex @@ -1952,7 +1957,7 @@ type TextStructure struct { // https://www.elastic.co/guide/en/elasticsearch/reference/current/find-structure.html FindStructure text_structure_find_structure.NewFindStructure // Tests a Grok pattern on some text. - // https://www.elastic.co/guide/en/elasticsearch/reference/current/test-grok-pattern-api.html + // https://www.elastic.co/guide/en/elasticsearch/reference/current/test-grok-pattern.html TestGrokPattern text_structure_test_grok_pattern.NewTestGrokPattern } @@ -2483,6 +2488,7 @@ func New(tp elastictransport.Interface) *API { Recovery: indices_recovery.NewRecoveryFunc(tp), Refresh: indices_refresh.NewRefreshFunc(tp), ReloadSearchAnalyzers: indices_reload_search_analyzers.NewReloadSearchAnalyzersFunc(tp), + ResolveCluster: indices_resolve_cluster.NewResolveClusterFunc(tp), ResolveIndex: indices_resolve_index.NewResolveIndexFunc(tp), Rollover: indices_rollover.NewRolloverFunc(tp), Segments: indices_segments.NewSegmentsFunc(tp), diff --git a/typedapi/asyncsearch/delete/delete.go b/typedapi/asyncsearch/delete/delete.go index 89682976d7..067edead3a 100644 --- a/typedapi/asyncsearch/delete/delete.go +++ b/typedapi/asyncsearch/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an async search by ID. If the search is still running, the search // request will be cancelled. Otherwise, the saved search results are deleted. 
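The typed client now wires in indices.resolve_cluster as well. A very rough sketch of calling it through the typed API; the constructor shape (whether the index expression is passed to the constructor, as with ResolveIndex, or set on a builder method) is an assumption here and should be checked against the generated resolvecluster package.

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	typed, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // assumed local cluster
	})
	if err != nil {
		log.Fatalf("client: %s", err)
	}

	// Assumed shape: expression in the constructor, then Do(ctx).
	res, err := typed.Indices.ResolveCluster("logs-*,remote*:logs-*").Do(context.Background())
	if err != nil {
		log.Fatalf("resolve cluster: %s", err)
	}

	// Print the typed response without assuming its field layout.
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
}
```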
diff --git a/typedapi/asyncsearch/delete/response.go b/typedapi/asyncsearch/delete/response.go index 411a1e02f5..57d8c4ac97 100644 --- a/typedapi/asyncsearch/delete/response.go +++ b/typedapi/asyncsearch/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/async_search/delete/AsyncSearchDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/async_search/delete/AsyncSearchDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/asyncsearch/get/get.go b/typedapi/asyncsearch/get/get.go index de8305220e..f79b370a28 100644 --- a/typedapi/asyncsearch/get/get.go +++ b/typedapi/asyncsearch/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the results of a previously submitted async search request given // its ID. diff --git a/typedapi/asyncsearch/get/response.go b/typedapi/asyncsearch/get/response.go index 712b137f24..2d8060f605 100644 --- a/typedapi/asyncsearch/get/response.go +++ b/typedapi/asyncsearch/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/async_search/get/AsyncSearchGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/async_search/get/AsyncSearchGetResponse.ts#L22-L24 type Response struct { // CompletionTime Indicates when the async search completed. Only present diff --git a/typedapi/asyncsearch/status/response.go b/typedapi/asyncsearch/status/response.go index 8d7d7305ef..7b412f54b9 100644 --- a/typedapi/asyncsearch/status/response.go +++ b/typedapi/asyncsearch/status/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package status @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package status // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/async_search/status/AsyncSearchStatusResponse.ts#L39-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/async_search/status/AsyncSearchStatusResponse.ts#L39-L41 type Response struct { // Clusters_ Metadata about clusters involved in the cross-cluster search. diff --git a/typedapi/asyncsearch/status/status.go b/typedapi/asyncsearch/status/status.go index 2e99ca2b23..37ebb714d5 100644 --- a/typedapi/asyncsearch/status/status.go +++ b/typedapi/asyncsearch/status/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the status of a previously submitted async search request given its // ID. diff --git a/typedapi/asyncsearch/submit/request.go b/typedapi/asyncsearch/submit/request.go index 7d1395bdfa..f980f34318 100644 --- a/typedapi/asyncsearch/submit/request.go +++ b/typedapi/asyncsearch/submit/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package submit @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package submit // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/async_search/submit/AsyncSearchSubmitRequest.ts#L55-L286 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/async_search/submit/AsyncSearchSubmitRequest.ts#L55-L286 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -165,17 +165,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "collapse": if err := dec.Decode(&s.Collapse); err != nil { - return err + return fmt.Errorf("%s | %w", "Collapse", err) } case "docvalue_fields": if err := dec.Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "explain": @@ -185,7 +185,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -197,12 +197,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Ext = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Ext); err != nil { - return err + return fmt.Errorf("%s | %w", "Ext", err) } case "fields": 
if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "from": @@ -213,7 +213,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -223,12 +223,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "indices_boost": if err := dec.Decode(&s.IndicesBoost); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesBoost", err) } case "knn": @@ -237,13 +237,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewKnnQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } s.Knn = append(s.Knn, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Knn); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } } @@ -254,7 +254,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinScore", err) } f := types.Float64(value) s.MinScore = &f @@ -265,12 +265,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "pit": if err := dec.Decode(&s.Pit); err != nil { - return err + return fmt.Errorf("%s | %w", "Pit", err) } case "post_filter": if err := dec.Decode(&s.PostFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "PostFilter", err) } case "profile": @@ -280,7 +280,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -289,7 +289,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "rescore": @@ -298,19 +298,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewRescore() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } s.Rescore = append(s.Rescore, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Rescore); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -318,12 +318,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]types.ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "search_after": if err := dec.Decode(&s.SearchAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } case "seq_no_primary_term": @@ -333,7 +333,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoPrimaryTerm", err) } s.SeqNoPrimaryTerm = &value case bool: @@ -348,7 +348,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { 
case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -358,7 +358,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "slice": if err := dec.Decode(&s.Slice); err != nil { - return err + return fmt.Errorf("%s | %w", "Slice", err) } case "sort": @@ -367,24 +367,24 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "stored_fields": @@ -393,19 +393,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } case "suggest": if err := dec.Decode(&s.Suggest); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } case "terminate_after": @@ -415,7 +415,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminateAfter", err) } s.TerminateAfter = &value case float64: @@ -426,7 +426,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -442,7 +442,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrackScores", err) } s.TrackScores = &value case bool: @@ -451,7 +451,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "track_total_hits": if err := dec.Decode(&s.TrackTotalHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackTotalHits", err) } case "version": @@ -461,7 +461,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } s.Version = &value case bool: diff --git a/typedapi/asyncsearch/submit/response.go b/typedapi/asyncsearch/submit/response.go index 9f3aac0820..6da90c564c 100644 --- a/typedapi/asyncsearch/submit/response.go +++ b/typedapi/asyncsearch/submit/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
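Several decoder branches in the submit request above accept a numeric field either as a JSON number or as a quoted string (the paired `case string` / `case float64` arms). A standalone illustration of that pattern, with a purely illustrative field name:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// flexibleInt accepts both 5 and "5", mirroring the type switch used by the
// generated UnmarshalJSON methods for numeric request fields.
type flexibleInt int

func (f *flexibleInt) UnmarshalJSON(data []byte) error {
	var raw interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw.(type) {
	case string:
		n, err := strconv.Atoi(v)
		if err != nil {
			return fmt.Errorf("%s | %w", "Size", err) // same wrapping style as the diff
		}
		*f = flexibleInt(n)
	case float64: // encoding/json decodes JSON numbers to float64
		*f = flexibleInt(v)
	default:
		return fmt.Errorf("Size: unexpected type %T", raw)
	}
	return nil
}

func main() {
	for _, in := range []string{`{"size": 5}`, `{"size": "5"}`} {
		var out struct {
			Size flexibleInt `json:"size"`
		}
		if err := json.Unmarshal([]byte(in), &out); err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Println(in, "->", out.Size)
	}
}
```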
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package submit @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package submit // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/async_search/submit/AsyncSearchSubmitResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/async_search/submit/AsyncSearchSubmitResponse.ts#L22-L24 type Response struct { // CompletionTime Indicates when the async search completed. Only present diff --git a/typedapi/asyncsearch/submit/submit.go b/typedapi/asyncsearch/submit/submit.go index 0235e2d7e5..f1c8a3b4d9 100644 --- a/typedapi/asyncsearch/submit/submit.go +++ b/typedapi/asyncsearch/submit/submit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Executes a search request asynchronously. package submit diff --git a/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go b/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go index 6f0f1fa5ad..a5cf42b5a7 100644 --- a/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go +++ b/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. // Direct use is not supported. diff --git a/typedapi/autoscaling/deleteautoscalingpolicy/response.go b/typedapi/autoscaling/deleteautoscalingpolicy/response.go index ec3bfcfc76..88234d2dea 100644 --- a/typedapi/autoscaling/deleteautoscalingpolicy/response.go +++ b/typedapi/autoscaling/deleteautoscalingpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteautoscalingpolicy // Response holds the response body struct for the package deleteautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/delete_autoscaling_policy/DeleteAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/delete_autoscaling_policy/DeleteAutoscalingPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go b/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go index c2c8413c82..0e3b225832 100644 --- a/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go +++ b/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets the current autoscaling capacity based on the configured autoscaling // policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not diff --git a/typedapi/autoscaling/getautoscalingcapacity/response.go b/typedapi/autoscaling/getautoscalingcapacity/response.go index b69c9c75f2..d4ab12b43c 100644 --- a/typedapi/autoscaling/getautoscalingcapacity/response.go +++ b/typedapi/autoscaling/getautoscalingcapacity/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getautoscalingcapacity @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautoscalingcapacity // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L25-L29 type Response struct { Policies map[string]types.AutoscalingDeciders `json:"policies"` } diff --git a/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go b/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go index 0738f88cd7..f9b537387b 100644 --- a/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go +++ b/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and // ECK. Direct use is not supported. diff --git a/typedapi/autoscaling/getautoscalingpolicy/response.go b/typedapi/autoscaling/getautoscalingpolicy/response.go index 651633074f..d0e26a85fa 100644 --- a/typedapi/autoscaling/getautoscalingpolicy/response.go +++ b/typedapi/autoscaling/getautoscalingpolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getautoscalingpolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_policy/GetAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_policy/GetAutoscalingPolicyResponse.ts#L22-L24 type Response struct { // Deciders Decider settings diff --git a/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go b/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go index 423ae3ea56..b21fa775d7 100644 --- a/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go +++ b/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and // ECK. Direct use is not supported. diff --git a/typedapi/autoscaling/putautoscalingpolicy/request.go b/typedapi/autoscaling/putautoscalingpolicy/request.go index ec5a34f816..5b40942543 100644 --- a/typedapi/autoscaling/putautoscalingpolicy/request.go +++ b/typedapi/autoscaling/putautoscalingpolicy/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putautoscalingpolicy @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package putautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyRequest.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyRequest.ts#L24-L35 type Request = types.AutoscalingPolicy diff --git a/typedapi/autoscaling/putautoscalingpolicy/response.go b/typedapi/autoscaling/putautoscalingpolicy/response.go index 4c1136faf8..ae2e7fa1f3 100644 --- a/typedapi/autoscaling/putautoscalingpolicy/response.go +++ b/typedapi/autoscaling/putautoscalingpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putautoscalingpolicy // Response holds the response body struct for the package putautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/cat/aliases/aliases.go b/typedapi/cat/aliases/aliases.go index 6d8940485e..c45625ec6c 100644 --- a/typedapi/cat/aliases/aliases.go +++ b/typedapi/cat/aliases/aliases.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Shows information about currently configured aliases to indices including // filter and routing infos. diff --git a/typedapi/cat/aliases/response.go b/typedapi/cat/aliases/response.go index efce67e72b..78c5085d9e 100644 --- a/typedapi/cat/aliases/response.go +++ b/typedapi/cat/aliases/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package aliases @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package aliases // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/aliases/CatAliasesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/aliases/CatAliasesResponse.ts#L22-L24 type Response []types.AliasesRecord diff --git a/typedapi/cat/allocation/allocation.go b/typedapi/cat/allocation/allocation.go index ce61aeeb53..dc87d04161 100644 --- a/typedapi/cat/allocation/allocation.go +++ b/typedapi/cat/allocation/allocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides a snapshot of how many shards are allocated to each data node and // how much disk space they are using. diff --git a/typedapi/cat/allocation/response.go b/typedapi/cat/allocation/response.go index 34444739d7..f8cb38447d 100644 --- a/typedapi/cat/allocation/response.go +++ b/typedapi/cat/allocation/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package allocation @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package allocation // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/allocation/CatAllocationResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/allocation/CatAllocationResponse.ts#L22-L24 type Response []types.AllocationRecord diff --git a/typedapi/cat/componenttemplates/component_templates.go b/typedapi/cat/componenttemplates/component_templates.go index a746cd5f46..00a193d5a8 100644 --- a/typedapi/cat/componenttemplates/component_templates.go +++ b/typedapi/cat/componenttemplates/component_templates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about existing component_templates templates. package componenttemplates diff --git a/typedapi/cat/componenttemplates/response.go b/typedapi/cat/componenttemplates/response.go index 0eb56cb7ca..e3da726679 100644 --- a/typedapi/cat/componenttemplates/response.go +++ b/typedapi/cat/componenttemplates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package componenttemplates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package componenttemplates // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/component_templates/CatComponentTemplatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/component_templates/CatComponentTemplatesResponse.ts#L22-L24 type Response []types.CatComponentTemplate diff --git a/typedapi/cat/count/count.go b/typedapi/cat/count/count.go index 94900949f6..887fe9f575 100644 --- a/typedapi/cat/count/count.go +++ b/typedapi/cat/count/count.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides quick access to the document count of the entire cluster, or // individual indices. diff --git a/typedapi/cat/count/response.go b/typedapi/cat/count/response.go index feabbef0c3..f60f42799f 100644 --- a/typedapi/cat/count/response.go +++ b/typedapi/cat/count/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package count @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/count/CatCountResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/count/CatCountResponse.ts#L22-L24 type Response []types.CountRecord diff --git a/typedapi/cat/fielddata/fielddata.go b/typedapi/cat/fielddata/fielddata.go index b4aaf6f278..abb73b23d0 100644 --- a/typedapi/cat/fielddata/fielddata.go +++ b/typedapi/cat/fielddata/fielddata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Shows how much heap memory is currently being used by fielddata on every data // node in the cluster. diff --git a/typedapi/cat/fielddata/response.go b/typedapi/cat/fielddata/response.go index fa31d9f284..80ea4d7bfa 100644 --- a/typedapi/cat/fielddata/response.go +++ b/typedapi/cat/fielddata/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package fielddata @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package fielddata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/fielddata/CatFielddataResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/fielddata/CatFielddataResponse.ts#L22-L24 type Response []types.FielddataRecord diff --git a/typedapi/cat/health/health.go b/typedapi/cat/health/health.go index d94c50a475..3e5aad3149 100644 --- a/typedapi/cat/health/health.go +++ b/typedapi/cat/health/health.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a concise representation of the cluster health. package health diff --git a/typedapi/cat/health/response.go b/typedapi/cat/health/response.go index 2f0ad80497..c4fa43d57d 100644 --- a/typedapi/cat/health/response.go +++ b/typedapi/cat/health/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package health @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package health // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/health/CatHealthResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/health/CatHealthResponse.ts#L22-L24 type Response []types.HealthRecord diff --git a/typedapi/cat/help/help.go b/typedapi/cat/help/help.go index 77fc44783e..7a659df044 100644 --- a/typedapi/cat/help/help.go +++ b/typedapi/cat/help/help.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns help for the Cat APIs. package help diff --git a/typedapi/cat/help/response.go b/typedapi/cat/help/response.go index 46235e750b..0bfe20ee14 100644 --- a/typedapi/cat/help/response.go +++ b/typedapi/cat/help/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package help @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package help // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/help/CatHelpResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/help/CatHelpResponse.ts#L22-L24 type Response []types.HelpRecord diff --git a/typedapi/cat/indices/indices.go b/typedapi/cat/indices/indices.go index 05b4eeb0d6..1426cd94de 100644 --- a/typedapi/cat/indices/indices.go +++ b/typedapi/cat/indices/indices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about indices: number of primaries and replicas, document // counts, disk size, ... diff --git a/typedapi/cat/indices/response.go b/typedapi/cat/indices/response.go index 3f5c7b1b6c..9c7f8cb749 100644 --- a/typedapi/cat/indices/response.go +++ b/typedapi/cat/indices/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package indices @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package indices // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/indices/CatIndicesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/indices/CatIndicesResponse.ts#L22-L24 type Response []types.IndicesRecord diff --git a/typedapi/cat/master/master.go b/typedapi/cat/master/master.go index 356c66f32c..393e3ac634 100644 --- a/typedapi/cat/master/master.go +++ b/typedapi/cat/master/master.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about the master node. package master diff --git a/typedapi/cat/master/response.go b/typedapi/cat/master/response.go index 6ad40a3cd5..563bff92db 100644 --- a/typedapi/cat/master/response.go +++ b/typedapi/cat/master/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package master @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package master // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/master/CatMasterResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/master/CatMasterResponse.ts#L22-L24 type Response []types.MasterRecord diff --git a/typedapi/cat/mldatafeeds/ml_datafeeds.go b/typedapi/cat/mldatafeeds/ml_datafeeds.go index 37367a88d4..41e9635f68 100644 --- a/typedapi/cat/mldatafeeds/ml_datafeeds.go +++ b/typedapi/cat/mldatafeeds/ml_datafeeds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets configuration and usage information about datafeeds. package mldatafeeds diff --git a/typedapi/cat/mldatafeeds/response.go b/typedapi/cat/mldatafeeds/response.go index 271f6da1ca..ba8f22b31b 100644 --- a/typedapi/cat/mldatafeeds/response.go +++ b/typedapi/cat/mldatafeeds/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mldatafeeds @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mldatafeeds // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_datafeeds/CatDatafeedsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_datafeeds/CatDatafeedsResponse.ts#L22-L24 type Response []types.DatafeedsRecord diff --git a/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go b/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go index 23780fdd01..d2acc519be 100644 --- a/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go +++ b/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets configuration and usage information about data frame analytics jobs. package mldataframeanalytics diff --git a/typedapi/cat/mldataframeanalytics/response.go b/typedapi/cat/mldataframeanalytics/response.go index a5282ebf5a..fd0f2641c1 100644 --- a/typedapi/cat/mldataframeanalytics/response.go +++ b/typedapi/cat/mldataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mldataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mldataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_data_frame_analytics/CatDataFrameAnalyticsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_data_frame_analytics/CatDataFrameAnalyticsResponse.ts#L22-L24 type Response []types.DataFrameAnalyticsRecord diff --git a/typedapi/cat/mljobs/ml_jobs.go b/typedapi/cat/mljobs/ml_jobs.go index e290d9f581..81f22d5713 100644 --- a/typedapi/cat/mljobs/ml_jobs.go +++ b/typedapi/cat/mljobs/ml_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets configuration and usage information about anomaly detection jobs. package mljobs diff --git a/typedapi/cat/mljobs/response.go b/typedapi/cat/mljobs/response.go index 6f4cce7ccc..70d7a09cf3 100644 --- a/typedapi/cat/mljobs/response.go +++ b/typedapi/cat/mljobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mljobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mljobs // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_jobs/CatJobsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_jobs/CatJobsResponse.ts#L22-L24 type Response []types.JobsRecord diff --git a/typedapi/cat/mltrainedmodels/ml_trained_models.go b/typedapi/cat/mltrainedmodels/ml_trained_models.go index fa01b610e5..ebf74e36ad 100644 --- a/typedapi/cat/mltrainedmodels/ml_trained_models.go +++ b/typedapi/cat/mltrainedmodels/ml_trained_models.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets configuration and usage information about inference trained models. package mltrainedmodels diff --git a/typedapi/cat/mltrainedmodels/response.go b/typedapi/cat/mltrainedmodels/response.go index 016fd91f67..d1cd70deff 100644 --- a/typedapi/cat/mltrainedmodels/response.go +++ b/typedapi/cat/mltrainedmodels/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mltrainedmodels @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mltrainedmodels // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_trained_models/CatTrainedModelsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_trained_models/CatTrainedModelsResponse.ts#L22-L24 type Response []types.TrainedModelsRecord diff --git a/typedapi/cat/nodeattrs/nodeattrs.go b/typedapi/cat/nodeattrs/nodeattrs.go index f0f3cc31ad..bbe97e1c98 100644 --- a/typedapi/cat/nodeattrs/nodeattrs.go +++ b/typedapi/cat/nodeattrs/nodeattrs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about custom node attributes. package nodeattrs diff --git a/typedapi/cat/nodeattrs/response.go b/typedapi/cat/nodeattrs/response.go index 2ddecf51c0..9a3e8b1935 100644 --- a/typedapi/cat/nodeattrs/response.go +++ b/typedapi/cat/nodeattrs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package nodeattrs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package nodeattrs // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/nodeattrs/CatNodeAttributesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/nodeattrs/CatNodeAttributesResponse.ts#L22-L24 type Response []types.NodeAttributesRecord diff --git a/typedapi/cat/nodes/nodes.go b/typedapi/cat/nodes/nodes.go index 81bf16ec43..9f488e67a8 100644 --- a/typedapi/cat/nodes/nodes.go +++ b/typedapi/cat/nodes/nodes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns basic statistics about performance of cluster nodes. package nodes diff --git a/typedapi/cat/nodes/response.go b/typedapi/cat/nodes/response.go index 63cb32a846..953bbdeff5 100644 --- a/typedapi/cat/nodes/response.go +++ b/typedapi/cat/nodes/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package nodes @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package nodes // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/nodes/CatNodesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/nodes/CatNodesResponse.ts#L22-L24 type Response []types.NodesRecord diff --git a/typedapi/cat/pendingtasks/pending_tasks.go b/typedapi/cat/pendingtasks/pending_tasks.go index 458eecfaff..7bda837d55 100644 --- a/typedapi/cat/pendingtasks/pending_tasks.go +++ b/typedapi/cat/pendingtasks/pending_tasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a concise representation of the cluster pending tasks. package pendingtasks diff --git a/typedapi/cat/pendingtasks/response.go b/typedapi/cat/pendingtasks/response.go index 87573ec352..30e0076c15 100644 --- a/typedapi/cat/pendingtasks/response.go +++ b/typedapi/cat/pendingtasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package pendingtasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package pendingtasks // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/pending_tasks/CatPendingTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/pending_tasks/CatPendingTasksResponse.ts#L22-L24 type Response []types.PendingTasksRecord diff --git a/typedapi/cat/plugins/plugins.go b/typedapi/cat/plugins/plugins.go index 518ab0c2ea..12eef185dc 100644 --- a/typedapi/cat/plugins/plugins.go +++ b/typedapi/cat/plugins/plugins.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about installed plugins across nodes node. package plugins diff --git a/typedapi/cat/plugins/response.go b/typedapi/cat/plugins/response.go index 66b21fc615..218179b4a1 100644 --- a/typedapi/cat/plugins/response.go +++ b/typedapi/cat/plugins/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package plugins @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package plugins // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/plugins/CatPluginsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/plugins/CatPluginsResponse.ts#L22-L24 type Response []types.PluginsRecord diff --git a/typedapi/cat/recovery/recovery.go b/typedapi/cat/recovery/recovery.go index 168df187e3..17751faca8 100644 --- a/typedapi/cat/recovery/recovery.go +++ b/typedapi/cat/recovery/recovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about index shard recoveries, both on-going completed. package recovery diff --git a/typedapi/cat/recovery/response.go b/typedapi/cat/recovery/response.go index 141dfc34c5..22f5117bb2 100644 --- a/typedapi/cat/recovery/response.go +++ b/typedapi/cat/recovery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package recovery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package recovery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/recovery/CatRecoveryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/recovery/CatRecoveryResponse.ts#L22-L24 type Response []types.RecoveryRecord diff --git a/typedapi/cat/repositories/repositories.go b/typedapi/cat/repositories/repositories.go index e0659f7e7e..97fca56fb1 100644 --- a/typedapi/cat/repositories/repositories.go +++ b/typedapi/cat/repositories/repositories.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about snapshot repositories registered in the cluster. package repositories diff --git a/typedapi/cat/repositories/response.go b/typedapi/cat/repositories/response.go index 5d20aebeec..dbdf00d6b2 100644 --- a/typedapi/cat/repositories/response.go +++ b/typedapi/cat/repositories/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package repositories @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package repositories // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/repositories/CatRepositoriesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/repositories/CatRepositoriesResponse.ts#L22-L24 type Response []types.RepositoriesRecord diff --git a/typedapi/cat/segments/response.go b/typedapi/cat/segments/response.go index a31309fa00..8a8c576ec3 100644 --- a/typedapi/cat/segments/response.go +++ b/typedapi/cat/segments/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package segments @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package segments // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/segments/CatSegmentsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/segments/CatSegmentsResponse.ts#L22-L24 type Response []types.SegmentsRecord diff --git a/typedapi/cat/segments/segments.go b/typedapi/cat/segments/segments.go index 772c1147c5..6136ffbbb8 100644 --- a/typedapi/cat/segments/segments.go +++ b/typedapi/cat/segments/segments.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides low-level information about the segments in the shards of an index. package segments diff --git a/typedapi/cat/shards/response.go b/typedapi/cat/shards/response.go index abe6336a3e..93bcdd400a 100644 --- a/typedapi/cat/shards/response.go +++ b/typedapi/cat/shards/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package shards @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package shards // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/shards/CatShardsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/shards/CatShardsResponse.ts#L22-L24 type Response []types.ShardsRecord diff --git a/typedapi/cat/shards/shards.go b/typedapi/cat/shards/shards.go index 8087cbdf1a..a3a14bf6ec 100644 --- a/typedapi/cat/shards/shards.go +++ b/typedapi/cat/shards/shards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides a detailed view of shard allocation on nodes. package shards diff --git a/typedapi/cat/snapshots/response.go b/typedapi/cat/snapshots/response.go index 932114b92c..d0a1189fc8 100644 --- a/typedapi/cat/snapshots/response.go +++ b/typedapi/cat/snapshots/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package snapshots @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package snapshots // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/snapshots/CatSnapshotsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/snapshots/CatSnapshotsResponse.ts#L22-L24 type Response []types.SnapshotsRecord diff --git a/typedapi/cat/snapshots/snapshots.go b/typedapi/cat/snapshots/snapshots.go index c9eb0b0cf8..700b173dee 100644 --- a/typedapi/cat/snapshots/snapshots.go +++ b/typedapi/cat/snapshots/snapshots.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns all snapshots in a specific repository. package snapshots diff --git a/typedapi/cat/tasks/response.go b/typedapi/cat/tasks/response.go index 0375b7ddbf..993cced0c1 100644 --- a/typedapi/cat/tasks/response.go +++ b/typedapi/cat/tasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package tasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package tasks // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/tasks/CatTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/tasks/CatTasksResponse.ts#L22-L24 type Response []types.TasksRecord diff --git a/typedapi/cat/tasks/tasks.go b/typedapi/cat/tasks/tasks.go index db47a1ded4..4f591f986b 100644 --- a/typedapi/cat/tasks/tasks.go +++ b/typedapi/cat/tasks/tasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about the tasks currently executing on one or more nodes // in the cluster. diff --git a/typedapi/cat/templates/response.go b/typedapi/cat/templates/response.go index 689a612897..0d4767add9 100644 --- a/typedapi/cat/templates/response.go +++ b/typedapi/cat/templates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package templates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package templates // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/templates/CatTemplatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/templates/CatTemplatesResponse.ts#L22-L24 type Response []types.TemplatesRecord diff --git a/typedapi/cat/templates/templates.go b/typedapi/cat/templates/templates.go index 918f2c774d..113f6e4810 100644 --- a/typedapi/cat/templates/templates.go +++ b/typedapi/cat/templates/templates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about existing templates. package templates diff --git a/typedapi/cat/threadpool/response.go b/typedapi/cat/threadpool/response.go index be193066d2..90ad105a43 100644 --- a/typedapi/cat/threadpool/response.go +++ b/typedapi/cat/threadpool/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package threadpool @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package threadpool // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/thread_pool/CatThreadPoolResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/thread_pool/CatThreadPoolResponse.ts#L22-L24 type Response []types.ThreadPoolRecord diff --git a/typedapi/cat/threadpool/thread_pool.go b/typedapi/cat/threadpool/thread_pool.go index 754ca02525..3d687f5aae 100644 --- a/typedapi/cat/threadpool/thread_pool.go +++ b/typedapi/cat/threadpool/thread_pool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns cluster-wide thread pool statistics per node. // By default the active, queue and rejected statistics are returned for all diff --git a/typedapi/cat/transforms/response.go b/typedapi/cat/transforms/response.go index 2c51cff786..b9016f004e 100644 --- a/typedapi/cat/transforms/response.go +++ b/typedapi/cat/transforms/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package transforms @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package transforms // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/transforms/CatTransformsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/transforms/CatTransformsResponse.ts#L22-L24 type Response []types.TransformsRecord diff --git a/typedapi/cat/transforms/transforms.go b/typedapi/cat/transforms/transforms.go index 28776c4bb2..04f096910f 100644 --- a/typedapi/cat/transforms/transforms.go +++ b/typedapi/cat/transforms/transforms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets configuration and usage information about transforms. package transforms diff --git a/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go b/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go index 1a1640448c..459147c7d7 100644 --- a/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go +++ b/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes auto-follow patterns. package deleteautofollowpattern diff --git a/typedapi/ccr/deleteautofollowpattern/response.go b/typedapi/ccr/deleteautofollowpattern/response.go index af55a4a882..211d455fc3 100644 --- a/typedapi/ccr/deleteautofollowpattern/response.go +++ b/typedapi/ccr/deleteautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteautofollowpattern // Response holds the response body struct for the package deleteautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/delete_auto_follow_pattern/DeleteAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/delete_auto_follow_pattern/DeleteAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/follow/follow.go b/typedapi/ccr/follow/follow.go index 2fad07faa2..da6e3b5288 100644 --- a/typedapi/ccr/follow/follow.go +++ b/typedapi/ccr/follow/follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new follower index configured to follow the referenced leader // index. diff --git a/typedapi/ccr/follow/request.go b/typedapi/ccr/follow/request.go index 46c62f6f04..4d791d8286 100644 --- a/typedapi/ccr/follow/request.go +++ b/typedapi/ccr/follow/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package follow @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package follow // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow/CreateFollowIndexRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow/CreateFollowIndexRequest.ts#L25-L51 type Request struct { LeaderIndex *string `json:"leader_index,omitempty"` MaxOutstandingReadRequests *int64 `json:"max_outstanding_read_requests,omitempty"` @@ -83,7 +83,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "leader_index": if err := dec.Decode(&s.LeaderIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndex", err) } case "max_outstanding_read_requests": @@ -93,7 +93,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingReadRequests", err) } s.MaxOutstandingReadRequests = &value case float64: @@ -108,7 +108,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingWriteRequests", err) } s.MaxOutstandingWriteRequests = &value case float64: @@ -123,7 +123,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestOperationCount", err) } s.MaxReadRequestOperationCount = &value case float64: @@ -134,7 +134,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_read_request_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +145,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_retry_delay": if err := dec.Decode(&s.MaxRetryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRetryDelay", err) } case "max_write_buffer_count": @@ -155,7 +155,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferCount", err) } s.MaxWriteBufferCount = &value case float64: @@ -166,7 +166,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_buffer_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferSize", err) } o := string(tmp[:]) o, err = 
strconv.Unquote(o) @@ -182,7 +182,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestOperationCount", err) } s.MaxWriteRequestOperationCount = &value case float64: @@ -193,7 +193,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_request_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -204,13 +204,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "read_poll_timeout": if err := dec.Decode(&s.ReadPollTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadPollTimeout", err) } case "remote_cluster": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteCluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/ccr/follow/response.go b/typedapi/ccr/follow/response.go index 410d64b4bc..b4543b5e99 100644 --- a/typedapi/ccr/follow/response.go +++ b/typedapi/ccr/follow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package follow // Response holds the response body struct for the package follow // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow/CreateFollowIndexResponse.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow/CreateFollowIndexResponse.ts#L20-L26 type Response struct { FollowIndexCreated bool `json:"follow_index_created"` FollowIndexShardsAcked bool `json:"follow_index_shards_acked"` diff --git a/typedapi/ccr/followinfo/follow_info.go b/typedapi/ccr/followinfo/follow_info.go index 1604af533f..c967b14af1 100644 --- a/typedapi/ccr/followinfo/follow_info.go +++ b/typedapi/ccr/followinfo/follow_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about all follower indices, including parameters and // status for each follower index diff --git a/typedapi/ccr/followinfo/response.go b/typedapi/ccr/followinfo/response.go index bb95ca9837..03e6a0dedd 100644 --- a/typedapi/ccr/followinfo/response.go +++ b/typedapi/ccr/followinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
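
The ccr/follow request decoder above also relies on a second recurring idiom: fields such as max_read_request_size are decoded into a json.RawMessage and then passed through strconv.Unquote so that quoted byte-size strings lose their surrounding quotes. A minimal sketch of that idiom follows, assuming the unquoted form is used only when the raw value really is a JSON string; decodeByteSize is a hypothetical helper, not part of the generated code.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeByteSize reads the next JSON token as a raw message; if it is a quoted
// string (for example "32mb"), the quotes are stripped with strconv.Unquote,
// otherwise the raw text is kept as-is. Failures are wrapped with the field
// name, mirroring the "%s | %w" convention introduced in this change.
func decodeByteSize(dec *json.Decoder, field string) (string, error) {
	var tmp json.RawMessage
	if err := dec.Decode(&tmp); err != nil {
		return "", fmt.Errorf("%s | %w", field, err)
	}
	o := string(tmp)
	if unquoted, err := strconv.Unquote(o); err == nil {
		o = unquoted
	}
	return o, nil
}

func main() {
	dec := json.NewDecoder(bytes.NewReader([]byte(`"32mb"`)))
	size, err := decodeByteSize(dec, "MaxReadRequestSize")
	fmt.Println(size, err) // 32mb <nil>
}
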
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package followinfo @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package followinfo // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow_info/FollowInfoResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow_info/FollowInfoResponse.ts#L22-L24 type Response struct { FollowerIndices []types.FollowerIndex `json:"follower_indices"` } diff --git a/typedapi/ccr/followstats/follow_stats.go b/typedapi/ccr/followstats/follow_stats.go index 23131602e3..9f9378ffc3 100644 --- a/typedapi/ccr/followstats/follow_stats.go +++ b/typedapi/ccr/followstats/follow_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves follower stats. return shard-level stats about the following tasks // associated with each shard for the specified indices. diff --git a/typedapi/ccr/followstats/response.go b/typedapi/ccr/followstats/response.go index 4af94ea3fd..17ff0d4ad3 100644 --- a/typedapi/ccr/followstats/response.go +++ b/typedapi/ccr/followstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package followstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package followstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow_stats/FollowIndexStatsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow_stats/FollowIndexStatsResponse.ts#L22-L24 type Response struct { Indices []types.FollowIndexStats `json:"indices"` } diff --git a/typedapi/ccr/forgetfollower/forget_follower.go b/typedapi/ccr/forgetfollower/forget_follower.go index 9323a7af14..5bdba0f922 100644 --- a/typedapi/ccr/forgetfollower/forget_follower.go +++ b/typedapi/ccr/forgetfollower/forget_follower.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes the follower retention leases from the leader. package forgetfollower diff --git a/typedapi/ccr/forgetfollower/request.go b/typedapi/ccr/forgetfollower/request.go index 4c0c78173e..6265adff69 100644 --- a/typedapi/ccr/forgetfollower/request.go +++ b/typedapi/ccr/forgetfollower/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package forgetfollower @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package forgetfollower // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/forget_follower/ForgetFollowerIndexRequest.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/forget_follower/ForgetFollowerIndexRequest.ts#L23-L38 type Request struct { FollowerCluster *string `json:"follower_cluster,omitempty"` FollowerIndex *string `json:"follower_index,omitempty"` @@ -74,7 +74,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "follower_cluster": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerCluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,18 +85,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "follower_index": if err := dec.Decode(&s.FollowerIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerIndex", err) } case "follower_index_uuid": if err := dec.Decode(&s.FollowerIndexUuid); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerIndexUuid", err) } case "leader_remote_cluster": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderRemoteCluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/ccr/forgetfollower/response.go b/typedapi/ccr/forgetfollower/response.go index 71363ff194..9f28e7879b 100644 --- a/typedapi/ccr/forgetfollower/response.go +++ b/typedapi/ccr/forgetfollower/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package forgetfollower @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package forgetfollower // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/forget_follower/ForgetFollowerIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/forget_follower/ForgetFollowerIndexResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go b/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go index 7500a549d7..6e5ee7295a 100644 --- a/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go +++ b/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets configured auto-follow patterns. Returns the specified auto-follow // pattern collection. 
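Note: the recurring change in the request-decoder hunks above replaces bare "return err" with fmt.Errorf("%s | %w", "<FieldName>", err) inside each generated UnmarshalJSON. The standalone sketch below (not part of the generated client; decodeCount and the field name are illustrative stand-ins) shows what that wrapping gives a caller: the failing Go field is named in the error message, and because %w is used the original cause stays reachable via errors.As / errors.Unwrap.

package main

import (
	"errors"
	"fmt"
	"strconv"
)

// decodeCount mimics one case of the generated UnmarshalJSON switch: a numeric
// field that may arrive as a JSON string. On failure the error is wrapped with
// the Go field name via "%s | %w", matching the pattern introduced in this diff.
// The field name is only an example.
func decodeCount(v string) (int64, error) {
	n, err := strconv.ParseInt(v, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("%s | %w", "MaxOutstandingReadRequests", err)
	}
	return n, nil
}

func main() {
	_, err := decodeCount("not-a-number")
	fmt.Println(err)

	// Because %w wraps the cause, errors.As still recovers the strconv error.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true
}

Run with the bad input, this prints "MaxOutstandingReadRequests | strconv.ParseInt: parsing \"not-a-number\": invalid syntax" and then true, i.e. callers gain the field name without losing the ability to inspect the underlying parse error.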
diff --git a/typedapi/ccr/getautofollowpattern/response.go b/typedapi/ccr/getautofollowpattern/response.go index 149a78a5b2..6070644116 100644 --- a/typedapi/ccr/getautofollowpattern/response.go +++ b/typedapi/ccr/getautofollowpattern/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getautofollowpattern @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/get_auto_follow_pattern/GetAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/get_auto_follow_pattern/GetAutoFollowPatternResponse.ts#L22-L24 type Response struct { Patterns []types.AutoFollowPattern `json:"patterns"` } diff --git a/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go b/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go index c07c32caa7..978cb073f0 100644 --- a/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go +++ b/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Pauses an auto-follow pattern package pauseautofollowpattern diff --git a/typedapi/ccr/pauseautofollowpattern/response.go b/typedapi/ccr/pauseautofollowpattern/response.go index 35e2baa799..ec8a37c02e 100644 --- a/typedapi/ccr/pauseautofollowpattern/response.go +++ b/typedapi/ccr/pauseautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package pauseautofollowpattern // Response holds the response body struct for the package pauseautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/pause_auto_follow_pattern/PauseAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/pause_auto_follow_pattern/PauseAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/pausefollow/pause_follow.go b/typedapi/ccr/pausefollow/pause_follow.go index b876988cff..10813967dd 100644 --- a/typedapi/ccr/pausefollow/pause_follow.go +++ b/typedapi/ccr/pausefollow/pause_follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Pauses a follower index. The follower index will not fetch any additional // operations from the leader index. diff --git a/typedapi/ccr/pausefollow/response.go b/typedapi/ccr/pausefollow/response.go index f80a707ee2..28786d23c7 100644 --- a/typedapi/ccr/pausefollow/response.go +++ b/typedapi/ccr/pausefollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package pausefollow // Response holds the response body struct for the package pausefollow // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/pause_follow/PauseFollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/pause_follow/PauseFollowIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go b/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go index 98d254a24f..8152aab209 100644 --- a/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go +++ b/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new named collection of auto-follow patterns against a specified // remote cluster. Newly created indices on the remote cluster matching any of diff --git a/typedapi/ccr/putautofollowpattern/request.go b/typedapi/ccr/putautofollowpattern/request.go index 1619d74ac5..b065202328 100644 --- a/typedapi/ccr/putautofollowpattern/request.go +++ b/typedapi/ccr/putautofollowpattern/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putautofollowpattern @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternRequest.ts#L27-L112 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternRequest.ts#L27-L112 type Request struct { // FollowIndexPattern The name of follower index. 
The template {{leader_index}} can be used to @@ -124,17 +124,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "follow_index_pattern": if err := dec.Decode(&s.FollowIndexPattern); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowIndexPattern", err) } case "leader_index_exclusion_patterns": if err := dec.Decode(&s.LeaderIndexExclusionPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndexExclusionPatterns", err) } case "leader_index_patterns": if err := dec.Decode(&s.LeaderIndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndexPatterns", err) } case "max_outstanding_read_requests": @@ -145,7 +145,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingReadRequests", err) } s.MaxOutstandingReadRequests = &value case float64: @@ -161,7 +161,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingWriteRequests", err) } s.MaxOutstandingWriteRequests = &value case float64: @@ -177,7 +177,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestOperationCount", err) } s.MaxReadRequestOperationCount = &value case float64: @@ -187,12 +187,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_read_request_size": if err := dec.Decode(&s.MaxReadRequestSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestSize", err) } case "max_retry_delay": if err := dec.Decode(&s.MaxRetryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRetryDelay", err) } case "max_write_buffer_count": @@ -203,7 +203,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferCount", err) } s.MaxWriteBufferCount = &value case float64: @@ -213,7 +213,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_buffer_size": if err := dec.Decode(&s.MaxWriteBufferSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferSize", err) } case "max_write_request_operation_count": @@ -224,7 +224,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestOperationCount", err) } s.MaxWriteRequestOperationCount = &value case float64: @@ -234,18 +234,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_request_size": if err := dec.Decode(&s.MaxWriteRequestSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestSize", err) } case "read_poll_timeout": if err := dec.Decode(&s.ReadPollTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadPollTimeout", err) } case "remote_cluster": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteCluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -259,7 +259,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Settings = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } } diff --git 
a/typedapi/ccr/putautofollowpattern/response.go b/typedapi/ccr/putautofollowpattern/response.go index 0c2bce2213..d8934ff473 100644 --- a/typedapi/ccr/putautofollowpattern/response.go +++ b/typedapi/ccr/putautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putautofollowpattern // Response holds the response body struct for the package putautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/resumeautofollowpattern/response.go b/typedapi/ccr/resumeautofollowpattern/response.go index ffb4e04037..6beadd2979 100644 --- a/typedapi/ccr/resumeautofollowpattern/response.go +++ b/typedapi/ccr/resumeautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resumeautofollowpattern // Response holds the response body struct for the package resumeautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/resume_auto_follow_pattern/ResumeAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/resume_auto_follow_pattern/ResumeAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go b/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go index d00954a6a5..7c56327ff5 100644 --- a/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go +++ b/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Resumes an auto-follow pattern that has been paused package resumeautofollowpattern diff --git a/typedapi/ccr/resumefollow/request.go b/typedapi/ccr/resumefollow/request.go index dfe11a3130..6e86477882 100644 --- a/typedapi/ccr/resumefollow/request.go +++ b/typedapi/ccr/resumefollow/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resumefollow @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package resumefollow // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/resume_follow/ResumeFollowIndexRequest.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/resume_follow/ResumeFollowIndexRequest.ts#L25-L46 type Request struct { MaxOutstandingReadRequests *int64 `json:"max_outstanding_read_requests,omitempty"` MaxOutstandingWriteRequests *int64 `json:"max_outstanding_write_requests,omitempty"` @@ -86,7 +86,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingReadRequests", err) } s.MaxOutstandingReadRequests = &value case float64: @@ -101,7 +101,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingWriteRequests", err) } s.MaxOutstandingWriteRequests = &value case float64: @@ -116,7 +116,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestOperationCount", err) } s.MaxReadRequestOperationCount = &value case float64: @@ -127,7 +127,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_read_request_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,7 +138,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_retry_delay": if err := dec.Decode(&s.MaxRetryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRetryDelay", err) } case "max_write_buffer_count": @@ -148,7 +148,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferCount", err) } s.MaxWriteBufferCount = &value case float64: @@ -159,7 +159,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_buffer_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -175,7 +175,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestOperationCount", err) } s.MaxWriteRequestOperationCount = &value case float64: @@ -186,7 +186,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_request_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -197,7 +197,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "read_poll_timeout": if err := dec.Decode(&s.ReadPollTimeout); err != nil { - return err + return 
fmt.Errorf("%s | %w", "ReadPollTimeout", err) } } diff --git a/typedapi/ccr/resumefollow/response.go b/typedapi/ccr/resumefollow/response.go index f58a2bc43e..284db7abd8 100644 --- a/typedapi/ccr/resumefollow/response.go +++ b/typedapi/ccr/resumefollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resumefollow // Response holds the response body struct for the package resumefollow // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/resume_follow/ResumeFollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/resume_follow/ResumeFollowIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/resumefollow/resume_follow.go b/typedapi/ccr/resumefollow/resume_follow.go index d86868a4d5..813a5b02a6 100644 --- a/typedapi/ccr/resumefollow/resume_follow.go +++ b/typedapi/ccr/resumefollow/resume_follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Resumes a follower index that has been paused package resumefollow diff --git a/typedapi/ccr/stats/response.go b/typedapi/ccr/stats/response.go index 8df2054398..fbd8a4d1d0 100644 --- a/typedapi/ccr/stats/response.go +++ b/typedapi/ccr/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/stats/CcrStatsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/stats/CcrStatsResponse.ts#L22-L27 type Response struct { AutoFollowStats types.AutoFollowStats `json:"auto_follow_stats"` FollowStats types.FollowStats `json:"follow_stats"` diff --git a/typedapi/ccr/stats/stats.go b/typedapi/ccr/stats/stats.go index 3393a657ed..08e8800dd4 100644 --- a/typedapi/ccr/stats/stats.go +++ b/typedapi/ccr/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets all stats related to cross-cluster replication. 
package stats diff --git a/typedapi/ccr/unfollow/response.go b/typedapi/ccr/unfollow/response.go index 0fa837deb2..66c467db0b 100644 --- a/typedapi/ccr/unfollow/response.go +++ b/typedapi/ccr/unfollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package unfollow // Response holds the response body struct for the package unfollow // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/unfollow/UnfollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/unfollow/UnfollowIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/unfollow/unfollow.go b/typedapi/ccr/unfollow/unfollow.go index 250f86c7a0..f5b0173b37 100644 --- a/typedapi/ccr/unfollow/unfollow.go +++ b/typedapi/ccr/unfollow/unfollow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stops the following task associated with a follower index and removes index // metadata and settings associated with cross-cluster replication. diff --git a/typedapi/cluster/allocationexplain/allocation_explain.go b/typedapi/cluster/allocationexplain/allocation_explain.go index 5a902f27a0..bb5c1b1e04 100644 --- a/typedapi/cluster/allocationexplain/allocation_explain.go +++ b/typedapi/cluster/allocationexplain/allocation_explain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides explanations for shard allocations in the cluster. package allocationexplain diff --git a/typedapi/cluster/allocationexplain/request.go b/typedapi/cluster/allocationexplain/request.go index 6f83ef64ce..7ef5430591 100644 --- a/typedapi/cluster/allocationexplain/request.go +++ b/typedapi/cluster/allocationexplain/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package allocationexplain @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package allocationexplain // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/ClusterAllocationExplainRequest.ts#L24-L61 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/ClusterAllocationExplainRequest.ts#L24-L61 type Request struct { // CurrentNode Specifies the node ID or the name of the node to only explain a shard that is @@ -80,7 +80,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "current_node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentNode", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,7 +91,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "primary": @@ -101,7 +101,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Primary", err) } s.Primary = &value case bool: @@ -116,7 +116,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = &value case float64: diff --git a/typedapi/cluster/allocationexplain/response.go b/typedapi/cluster/allocationexplain/response.go index d9021669ae..e31b447d1f 100644 --- a/typedapi/cluster/allocationexplain/response.go +++ b/typedapi/cluster/allocationexplain/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package allocationexplain @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package allocationexplain // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/ClusterAllocationExplainResponse.ts#L32-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/ClusterAllocationExplainResponse.ts#L32-L64 type Response struct { AllocateExplanation *string `json:"allocate_explanation,omitempty"` AllocationDelay types.Duration `json:"allocation_delay,omitempty"` diff --git a/typedapi/cluster/deletecomponenttemplate/delete_component_template.go b/typedapi/cluster/deletecomponenttemplate/delete_component_template.go index 7aa139be16..a17c313d40 100644 --- a/typedapi/cluster/deletecomponenttemplate/delete_component_template.go +++ b/typedapi/cluster/deletecomponenttemplate/delete_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a component template package deletecomponenttemplate diff --git a/typedapi/cluster/deletecomponenttemplate/response.go b/typedapi/cluster/deletecomponenttemplate/response.go index cfcfe96194..c5d9e71b7e 100644 --- a/typedapi/cluster/deletecomponenttemplate/response.go +++ b/typedapi/cluster/deletecomponenttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletecomponenttemplate // Response holds the response body struct for the package deletecomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/delete_component_template/ClusterDeleteComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/delete_component_template/ClusterDeleteComponentTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go b/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go index 8a8a24eaaa..550a1d5064 100644 --- a/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go +++ b/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clears cluster voting config exclusions. package deletevotingconfigexclusions diff --git a/typedapi/cluster/existscomponenttemplate/exists_component_template.go b/typedapi/cluster/existscomponenttemplate/exists_component_template.go index 53122b06cc..f120f6c471 100644 --- a/typedapi/cluster/existscomponenttemplate/exists_component_template.go +++ b/typedapi/cluster/existscomponenttemplate/exists_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a particular component template exist package existscomponenttemplate diff --git a/typedapi/cluster/getcomponenttemplate/get_component_template.go b/typedapi/cluster/getcomponenttemplate/get_component_template.go index 49183bbab3..f16ebd899d 100644 --- a/typedapi/cluster/getcomponenttemplate/get_component_template.go +++ b/typedapi/cluster/getcomponenttemplate/get_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns one or more component templates package getcomponenttemplate diff --git a/typedapi/cluster/getcomponenttemplate/response.go b/typedapi/cluster/getcomponenttemplate/response.go index 4cf19792f1..b3e3584a71 100644 --- a/typedapi/cluster/getcomponenttemplate/response.go +++ b/typedapi/cluster/getcomponenttemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getcomponenttemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/get_component_template/ClusterGetComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/get_component_template/ClusterGetComponentTemplateResponse.ts#L22-L24 type Response struct { ComponentTemplates []types.ClusterComponentTemplate `json:"component_templates"` } diff --git a/typedapi/cluster/getsettings/get_settings.go b/typedapi/cluster/getsettings/get_settings.go index 2db963f38b..6eb39dac28 100644 --- a/typedapi/cluster/getsettings/get_settings.go +++ b/typedapi/cluster/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns cluster settings. package getsettings diff --git a/typedapi/cluster/getsettings/response.go b/typedapi/cluster/getsettings/response.go index 87d557b572..5f132064bb 100644 --- a/typedapi/cluster/getsettings/response.go +++ b/typedapi/cluster/getsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/get_settings/ClusterGetSettingsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/get_settings/ClusterGetSettingsResponse.ts#L23-L29 type Response struct { Defaults map[string]json.RawMessage `json:"defaults,omitempty"` Persistent map[string]json.RawMessage `json:"persistent"` diff --git a/typedapi/cluster/health/health.go b/typedapi/cluster/health/health.go index feafe0da39..70db1baea1 100644 --- a/typedapi/cluster/health/health.go +++ b/typedapi/cluster/health/health.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns basic information about the health of the cluster. package health diff --git a/typedapi/cluster/health/response.go b/typedapi/cluster/health/response.go index 89ea5bc1a7..5efded1ab8 100644 --- a/typedapi/cluster/health/response.go +++ b/typedapi/cluster/health/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package health @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package health // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/health/ClusterHealthResponse.ts#L26-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/health/ClusterHealthResponse.ts#L26-L37 type Response struct { // ActivePrimaryShards The number of active primary shards. diff --git a/typedapi/cluster/info/info.go b/typedapi/cluster/info/info.go index a54d69986b..0a6d06c3d8 100644 --- a/typedapi/cluster/info/info.go +++ b/typedapi/cluster/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns different information about the cluster. package info diff --git a/typedapi/cluster/info/response.go b/typedapi/cluster/info/response.go index 654f985059..0aa43c2893 100644 --- a/typedapi/cluster/info/response.go +++ b/typedapi/cluster/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/info/ClusterInfoResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/info/ClusterInfoResponse.ts#L26-L34 type Response struct { ClusterName string `json:"cluster_name"` Http *types.Http `json:"http,omitempty"` diff --git a/typedapi/cluster/pendingtasks/pending_tasks.go b/typedapi/cluster/pendingtasks/pending_tasks.go index 7f0651d328..8f5b8d926b 100644 --- a/typedapi/cluster/pendingtasks/pending_tasks.go +++ b/typedapi/cluster/pendingtasks/pending_tasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a list of any cluster-level changes (e.g. create index, update // mapping, diff --git a/typedapi/cluster/pendingtasks/response.go b/typedapi/cluster/pendingtasks/response.go index ba408a2513..c85547ebb7 100644 --- a/typedapi/cluster/pendingtasks/response.go +++ b/typedapi/cluster/pendingtasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package pendingtasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package pendingtasks // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/pending_tasks/ClusterPendingTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/pending_tasks/ClusterPendingTasksResponse.ts#L22-L24 type Response struct { Tasks []types.PendingTask `json:"tasks"` } diff --git a/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go b/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go index 03b8f9c7ee..4022720ca4 100644 --- a/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go +++ b/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the cluster voting config exclusions by node ids or node names. package postvotingconfigexclusions diff --git a/typedapi/cluster/putcomponenttemplate/put_component_template.go b/typedapi/cluster/putcomponenttemplate/put_component_template.go index 782caf8bfd..3c752cf23b 100644 --- a/typedapi/cluster/putcomponenttemplate/put_component_template.go +++ b/typedapi/cluster/putcomponenttemplate/put_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a component template package putcomponenttemplate diff --git a/typedapi/cluster/putcomponenttemplate/request.go b/typedapi/cluster/putcomponenttemplate/request.go index 5fb845d82c..9551bb93d2 100644 --- a/typedapi/cluster/putcomponenttemplate/request.go +++ b/typedapi/cluster/putcomponenttemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putcomponenttemplate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/put_component_template/ClusterPutComponentTemplateRequest.ts#L29-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/put_component_template/ClusterPutComponentTemplateRequest.ts#L29-L99 type Request struct { // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster @@ -100,7 +100,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowAutoCreate", err) } s.AllowAutoCreate = &value case bool: @@ -109,17 +109,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/cluster/putcomponenttemplate/response.go b/typedapi/cluster/putcomponenttemplate/response.go index dd22446b72..23b9c96323 100644 --- a/typedapi/cluster/putcomponenttemplate/response.go +++ b/typedapi/cluster/putcomponenttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putcomponenttemplate // Response holds the response body struct for the package putcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/put_component_template/ClusterPutComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/put_component_template/ClusterPutComponentTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/cluster/putsettings/put_settings.go b/typedapi/cluster/putsettings/put_settings.go index e5a63f1bc8..6af3107d20 100644 --- a/typedapi/cluster/putsettings/put_settings.go +++ b/typedapi/cluster/putsettings/put_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the cluster settings. 
package putsettings diff --git a/typedapi/cluster/putsettings/request.go b/typedapi/cluster/putsettings/request.go index b8421a22c5..d1dcf01174 100644 --- a/typedapi/cluster/putsettings/request.go +++ b/typedapi/cluster/putsettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsettings @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/put_settings/ClusterPutSettingsRequest.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/put_settings/ClusterPutSettingsRequest.ts#L25-L43 type Request struct { Persistent map[string]json.RawMessage `json:"persistent,omitempty"` Transient map[string]json.RawMessage `json:"transient,omitempty"` diff --git a/typedapi/cluster/putsettings/response.go b/typedapi/cluster/putsettings/response.go index f7e30a9589..571a1ce346 100644 --- a/typedapi/cluster/putsettings/response.go +++ b/typedapi/cluster/putsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/put_settings/ClusterPutSettingsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/put_settings/ClusterPutSettingsResponse.ts#L23-L29 type Response struct { Acknowledged bool `json:"acknowledged"` Persistent map[string]json.RawMessage `json:"persistent"` diff --git a/typedapi/cluster/remoteinfo/remote_info.go b/typedapi/cluster/remoteinfo/remote_info.go index 2a31a5cac5..826ff88440 100644 --- a/typedapi/cluster/remoteinfo/remote_info.go +++ b/typedapi/cluster/remoteinfo/remote_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the information about configured remote clusters. package remoteinfo diff --git a/typedapi/cluster/remoteinfo/response.go b/typedapi/cluster/remoteinfo/response.go index 96340fa92a..741e8165dd 100644 --- a/typedapi/cluster/remoteinfo/response.go +++ b/typedapi/cluster/remoteinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package remoteinfo @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package remoteinfo // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L24-L26 type Response map[string]types.ClusterRemoteInfo diff --git a/typedapi/cluster/reroute/request.go b/typedapi/cluster/reroute/request.go index cf57b8762e..15e5a97999 100644 --- a/typedapi/cluster/reroute/request.go +++ b/typedapi/cluster/reroute/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reroute @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package reroute // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/ClusterRerouteRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/ClusterRerouteRequest.ts#L25-L70 type Request struct { // Commands Defines the commands to perform. diff --git a/typedapi/cluster/reroute/reroute.go b/typedapi/cluster/reroute/reroute.go index 23a76d3503..c17738bfda 100644 --- a/typedapi/cluster/reroute/reroute.go +++ b/typedapi/cluster/reroute/reroute.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to manually change the allocation of individual shards in the cluster. package reroute diff --git a/typedapi/cluster/reroute/response.go b/typedapi/cluster/reroute/response.go index 39cad112dc..1694ab86fb 100644 --- a/typedapi/cluster/reroute/response.go +++ b/typedapi/cluster/reroute/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reroute @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package reroute // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/ClusterRerouteResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/ClusterRerouteResponse.ts#L23-L34 type Response struct { Acknowledged bool `json:"acknowledged"` Explanations []types.RerouteExplanation `json:"explanations,omitempty"` diff --git a/typedapi/cluster/state/response.go b/typedapi/cluster/state/response.go index f848ff78e1..2abc39b8bb 100644 --- a/typedapi/cluster/state/response.go +++ b/typedapi/cluster/state/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package state @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package state // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/state/ClusterStateResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/state/ClusterStateResponse.ts#L22-L29 type Response = json.RawMessage diff --git a/typedapi/cluster/state/state.go b/typedapi/cluster/state/state.go index 5efa909261..32f605df5e 100644 --- a/typedapi/cluster/state/state.go +++ b/typedapi/cluster/state/state.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a comprehensive information about the state of the cluster. package state diff --git a/typedapi/cluster/stats/response.go b/typedapi/cluster/stats/response.go index ab9dfe035d..73683f2fce 100644 --- a/typedapi/cluster/stats/response.go +++ b/typedapi/cluster/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/ClusterStatsResponse.ts#L53-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/ClusterStatsResponse.ts#L53-L55 type Response struct { // ClusterName Name of the cluster, based on the cluster name setting. 
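Note: several decoders touched in these hunks (for example the "primary" and "shard" cases in cluster/allocationexplain and the "allow_auto_create" case in cluster/putcomponenttemplate) accept a value either as a native JSON number/boolean or as a quoted string, converting with strconv and now reporting failures with the field name. A condensed, self-contained sketch of that switch follows; shardField is a hypothetical helper written for illustration, not code from this PR.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// shardField condenses the string-or-number switch used by the generated
// decoders: a quoted string is parsed with strconv, a JSON number is taken
// as-is, and any failure is reported with the field name.
func shardField(raw json.RawMessage) (int, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return 0, fmt.Errorf("%s | %w", "Shard", err)
	}
	switch v := v.(type) {
	case string:
		n, err := strconv.Atoi(v)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", "Shard", err)
		}
		return n, nil
	case float64:
		// encoding/json decodes all JSON numbers into float64.
		return int(v), nil
	default:
		return 0, fmt.Errorf("Shard | unexpected JSON type %T", v)
	}
}

func main() {
	for _, in := range []string{`3`, `"3"`, `"three"`} {
		n, err := shardField(json.RawMessage(in))
		fmt.Println(in, "->", n, err)
	}
}

Feeding it 3, "3", and "three" shows the first two decode to 3 while the last returns an error prefixed with Shard, mirroring the lenient parsing visible throughout the generated request decoders in this diff.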
diff --git a/typedapi/cluster/stats/stats.go b/typedapi/cluster/stats/stats.go index 383d933e19..ff6867f436 100644 --- a/typedapi/cluster/stats/stats.go +++ b/typedapi/cluster/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns high-level overview of cluster statistics. package stats diff --git a/typedapi/core/bulk/bulk.go b/typedapi/core/bulk/bulk.go index 18ae0eae57..c57be9a6b1 100644 --- a/typedapi/core/bulk/bulk.go +++ b/typedapi/core/bulk/bulk.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to perform multiple index/update/delete operations in a single // request. diff --git a/typedapi/core/bulk/request.go b/typedapi/core/bulk/request.go index ab7419c3c1..c51aba749d 100644 --- a/typedapi/core/bulk/request.go +++ b/typedapi/core/bulk/request.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package bulk // Request holds the request body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/BulkRequest.ts#L32-L103 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/BulkRequest.ts#L32-L103 type Request = []interface{} diff --git a/typedapi/core/bulk/response.go b/typedapi/core/bulk/response.go index 2f1ef4c956..79fdfb46b6 100644 --- a/typedapi/core/bulk/response.go +++ b/typedapi/core/bulk/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package bulk @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/BulkResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/BulkResponse.ts#L24-L31 type Response struct { Errors bool `json:"errors"` IngestTook *int64 `json:"ingest_took,omitempty"` diff --git a/typedapi/core/clearscroll/clear_scroll.go b/typedapi/core/clearscroll/clear_scroll.go index 88e314c88c..fec0b92434 100644 --- a/typedapi/core/clearscroll/clear_scroll.go +++ b/typedapi/core/clearscroll/clear_scroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Explicitly clears the search context for a scroll. package clearscroll diff --git a/typedapi/core/clearscroll/request.go b/typedapi/core/clearscroll/request.go index 4f08a6ab99..5e2e14eac4 100644 --- a/typedapi/core/clearscroll/request.go +++ b/typedapi/core/clearscroll/request.go @@ -16,21 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearscroll import ( - "bytes" "encoding/json" - "errors" "fmt" - "io" ) // Request holds the request body struct for the package clearscroll // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/clear_scroll/ClearScrollRequest.ts#L23-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/clear_scroll/ClearScrollRequest.ts#L23-L45 type Request struct { // ScrollId Scroll IDs to clear. @@ -55,38 +52,3 @@ func (r *Request) FromJSON(data string) (*Request, error) { return &req, nil } - -func (s *Request) UnmarshalJSON(data []byte) error { - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "scroll_id": - rawMsg := json.RawMessage{} - dec.Decode(&rawMsg) - if !bytes.HasPrefix(rawMsg, []byte("[")) { - o := new(string) - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err - } - - s.ScrollId = append(s.ScrollId, *o) - } else { - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.ScrollId); err != nil { - return err - } - } - - } - } - return nil -} diff --git a/typedapi/core/clearscroll/response.go b/typedapi/core/clearscroll/response.go index 6772c832d7..09e76c2227 100644 --- a/typedapi/core/clearscroll/response.go +++ b/typedapi/core/clearscroll/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearscroll // Response holds the response body struct for the package clearscroll // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/clear_scroll/ClearScrollResponse.ts#L22-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/clear_scroll/ClearScrollResponse.ts#L22-L36 type Response struct { NumFreed int `json:"num_freed"` Succeeded bool `json:"succeeded"` diff --git a/typedapi/core/closepointintime/close_point_in_time.go b/typedapi/core/closepointintime/close_point_in_time.go index e1e2db5cad..e6ae42abb0 100644 --- a/typedapi/core/closepointintime/close_point_in_time.go +++ b/typedapi/core/closepointintime/close_point_in_time.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Close a point in time package closepointintime diff --git a/typedapi/core/closepointintime/request.go b/typedapi/core/closepointintime/request.go index 46c4c27b16..58be126e94 100644 --- a/typedapi/core/closepointintime/request.go +++ b/typedapi/core/closepointintime/request.go @@ -16,21 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package closepointintime import ( - "bytes" "encoding/json" - "errors" "fmt" - "io" ) // Request holds the request body struct for the package closepointintime // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/close_point_in_time/ClosePointInTimeRequest.ts#L23-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/close_point_in_time/ClosePointInTimeRequest.ts#L23-L37 type Request struct { // Id The ID of the point-in-time. @@ -54,27 +51,3 @@ func (r *Request) FromJSON(data string) (*Request, error) { return &req, nil } - -func (s *Request) UnmarshalJSON(data []byte) error { - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "id": - if err := dec.Decode(&s.Id); err != nil { - return err - } - - } - } - return nil -} diff --git a/typedapi/core/closepointintime/response.go b/typedapi/core/closepointintime/response.go index a1846d6f86..7d5c4a8619 100644 --- a/typedapi/core/closepointintime/response.go +++ b/typedapi/core/closepointintime/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package closepointintime // Response holds the response body struct for the package closepointintime // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/close_point_in_time/ClosePointInTimeResponse.ts#L22-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/close_point_in_time/ClosePointInTimeResponse.ts#L22-L36 type Response struct { NumFreed int `json:"num_freed"` Succeeded bool `json:"succeeded"` diff --git a/typedapi/core/count/count.go b/typedapi/core/count/count.go index 7fae63addc..52137f024f 100644 --- a/typedapi/core/count/count.go +++ b/typedapi/core/count/count.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns number of documents matching a query. 
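A minimal, self-contained sketch (not part of the generated output) of what the removals above imply: with the hand-written UnmarshalJSON methods gone from clearscroll and closepointintime, those request bodies are decoded by plain encoding/json. The struct below is a stand-in mirroring clearscroll.Request from the hunk above; the scroll ID value is a hypothetical placeholder.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in mirroring typedapi/core/clearscroll.Request as shown above.
type clearScrollRequest struct {
	ScrollId []string `json:"scroll_id,omitempty"`
}

func main() {
	req := clearScrollRequest{ScrollId: []string{"example-scroll-id"}} // placeholder ID
	body, _ := json.Marshal(req)
	fmt.Println(string(body)) // {"scroll_id":["example-scroll-id"]}

	var decoded clearScrollRequest
	_ = json.Unmarshal(body, &decoded) // standard library decoding, no custom logic
}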
package count diff --git a/typedapi/core/count/request.go b/typedapi/core/count/request.go index f8c4baaaf6..d275035b1d 100644 --- a/typedapi/core/count/request.go +++ b/typedapi/core/count/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package count @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/count/CountRequest.ts#L26-L120 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/count/CountRequest.ts#L26-L120 type Request struct { // Query Defines the search definition using the Query DSL. diff --git a/typedapi/core/count/response.go b/typedapi/core/count/response.go index 4b8c4eafaf..7f8eacdfff 100644 --- a/typedapi/core/count/response.go +++ b/typedapi/core/count/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package count @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/count/CountResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/count/CountResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/core/create/create.go b/typedapi/core/create/create.go index 393a37406b..e41f30f5fb 100644 --- a/typedapi/core/create/create.go +++ b/typedapi/core/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new document in the index. // diff --git a/typedapi/core/create/request.go b/typedapi/core/create/request.go index bb482ec4fc..f589c0ed3a 100644 --- a/typedapi/core/create/request.go +++ b/typedapi/core/create/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package create @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/create/CreateRequest.ts#L32-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/create/CreateRequest.ts#L32-L95 type Request = json.RawMessage diff --git a/typedapi/core/create/response.go b/typedapi/core/create/response.go index 2d2ccbe630..d6c5645e80 100644 --- a/typedapi/core/create/response.go +++ b/typedapi/core/create/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package create @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/create/CreateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/create/CreateResponse.ts#L22-L24 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Id_ string `json:"_id"` diff --git a/typedapi/core/delete/delete.go b/typedapi/core/delete/delete.go index a4d0ec2043..f34e5310da 100644 --- a/typedapi/core/delete/delete.go +++ b/typedapi/core/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes a document from the index. package delete diff --git a/typedapi/core/delete/response.go b/typedapi/core/delete/response.go index 99838dbec6..0a82143d3d 100644 --- a/typedapi/core/delete/response.go +++ b/typedapi/core/delete/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/delete/DeleteResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/delete/DeleteResponse.ts#L22-L34 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Id_ string `json:"_id"` diff --git a/typedapi/core/deletebyquery/delete_by_query.go b/typedapi/core/deletebyquery/delete_by_query.go index d36d02298a..dc453b7e04 100644 --- a/typedapi/core/deletebyquery/delete_by_query.go +++ b/typedapi/core/deletebyquery/delete_by_query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes documents matching the provided query. package deletebyquery diff --git a/typedapi/core/deletebyquery/request.go b/typedapi/core/deletebyquery/request.go index da98f7ea99..fc15414249 100644 --- a/typedapi/core/deletebyquery/request.go +++ b/typedapi/core/deletebyquery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletebyquery @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package deletebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/delete_by_query/DeleteByQueryRequest.ts#L36-L209 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/delete_by_query/DeleteByQueryRequest.ts#L36-L209 type Request struct { // MaxDocs The maximum number of documents to delete. diff --git a/typedapi/core/deletebyquery/response.go b/typedapi/core/deletebyquery/response.go index 5653a3ba33..08f8c556f2 100644 --- a/typedapi/core/deletebyquery/response.go +++ b/typedapi/core/deletebyquery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletebyquery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deletebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/delete_by_query/DeleteByQueryResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/delete_by_query/DeleteByQueryResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` Deleted *int64 `json:"deleted,omitempty"` diff --git a/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go b/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go index 1ee7e756ab..fb200b5d86 100644 --- a/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go +++ b/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Changes the number of requests per second for a particular Delete By Query // operation. diff --git a/typedapi/core/deletebyqueryrethrottle/response.go b/typedapi/core/deletebyqueryrethrottle/response.go index 9160e4bd69..a7c5da114d 100644 --- a/typedapi/core/deletebyqueryrethrottle/response.go +++ b/typedapi/core/deletebyqueryrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletebyqueryrethrottle @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package deletebyqueryrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/delete_by_query_rethrottle/DeleteByQueryRethrottleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/delete_by_query_rethrottle/DeleteByQueryRethrottleResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` // Nodes Task information grouped by node, if `group_by` was set to `node` (the @@ -68,7 +69,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "node_failures": if err := dec.Decode(&s.NodeFailures); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeFailures", err) } case "nodes": @@ -76,12 +77,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Nodes = make(map[string]types.NodeTasks, 0) } if err := dec.Decode(&s.Nodes); err != nil { - return err + return fmt.Errorf("%s | %w", "Nodes", err) } case "task_failures": if err := dec.Decode(&s.TaskFailures); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskFailures", err) } case "tasks": @@ -94,13 +95,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]types.ParentTaskInfo, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } s.Tasks = o case '[': o := []types.TaskInfo{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } s.Tasks = o } diff --git a/typedapi/core/deletescript/delete_script.go b/typedapi/core/deletescript/delete_script.go index cd1a7695e1..b803f89dc3 100644 --- a/typedapi/core/deletescript/delete_script.go +++ b/typedapi/core/deletescript/delete_script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a script. package deletescript diff --git a/typedapi/core/deletescript/response.go b/typedapi/core/deletescript/response.go index 897671a892..378e46fc52 100644 --- a/typedapi/core/deletescript/response.go +++ b/typedapi/core/deletescript/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
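The %w verb in the new fmt.Errorf("%s | %w", ...) wrapping keeps the original decode error in the chain, so errors.Is and errors.As continue to see the underlying encoding/json error while the message gains the field name. A small sketch of the pattern, written independently of the generated types:

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// decodeField mimics the generated pattern: fmt.Errorf("%s | %w", name, err).
func decodeField(name string, raw []byte, v any) error {
	if err := json.Unmarshal(raw, v); err != nil {
		return fmt.Errorf("%s | %w", name, err)
	}
	return nil
}

func main() {
	var took int64
	err := decodeField("Took", []byte(`"fast"`), &took)

	// The field name prefixes the message, but the original error is still reachable.
	var typeErr *json.UnmarshalTypeError
	if errors.As(err, &typeErr) {
		fmt.Printf("%v (JSON value was a %s)\n", err, typeErr.Value)
	}
}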
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletescript // Response holds the response body struct for the package deletescript // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/delete_script/DeleteScriptResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/delete_script/DeleteScriptResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/core/exists/exists.go b/typedapi/core/exists/exists.go index 8576258d3e..797f646453 100644 --- a/typedapi/core/exists/exists.go +++ b/typedapi/core/exists/exists.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a document exists in an index. package exists diff --git a/typedapi/core/existssource/exists_source.go b/typedapi/core/existssource/exists_source.go index 96649901dc..8a5ec4a4fc 100644 --- a/typedapi/core/existssource/exists_source.go +++ b/typedapi/core/existssource/exists_source.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a document source exists in an index. package existssource diff --git a/typedapi/core/explain/explain.go b/typedapi/core/explain/explain.go index 00af5df315..48738060e7 100644 --- a/typedapi/core/explain/explain.go +++ b/typedapi/core/explain/explain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about why a specific matches (or doesn't match) a query. package explain diff --git a/typedapi/core/explain/request.go b/typedapi/core/explain/request.go index 9c0117b1e7..8837c01b13 100644 --- a/typedapi/core/explain/request.go +++ b/typedapi/core/explain/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explain @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explain // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/explain/ExplainRequest.ts#L26-L105 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/explain/ExplainRequest.ts#L26-L105 type Request struct { // Query Defines the search definition using the Query DSL. diff --git a/typedapi/core/explain/response.go b/typedapi/core/explain/response.go index d255b216f6..a7d3d2ddf2 100644 --- a/typedapi/core/explain/response.go +++ b/typedapi/core/explain/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explain @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explain // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/explain/ExplainResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/explain/ExplainResponse.ts#L23-L31 type Response struct { Explanation *types.ExplanationDetail `json:"explanation,omitempty"` Get *types.InlineGet `json:"get,omitempty"` diff --git a/typedapi/core/fieldcaps/field_caps.go b/typedapi/core/fieldcaps/field_caps.go index fc6eebb0bf..44173454f7 100644 --- a/typedapi/core/fieldcaps/field_caps.go +++ b/typedapi/core/fieldcaps/field_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the information about the capabilities of fields among multiple // indices. diff --git a/typedapi/core/fieldcaps/request.go b/typedapi/core/fieldcaps/request.go index a3e0c1d605..515aafa947 100644 --- a/typedapi/core/fieldcaps/request.go +++ b/typedapi/core/fieldcaps/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package fieldcaps @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package fieldcaps // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/field_caps/FieldCapabilitiesRequest.ts#L25-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/field_caps/FieldCapabilitiesRequest.ts#L25-L106 type Request struct { // Fields List of fields to retrieve capabilities for. 
Wildcard (`*`) expressions are @@ -86,24 +86,24 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } case "index_filter": if err := dec.Decode(&s.IndexFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexFilter", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } } diff --git a/typedapi/core/fieldcaps/response.go b/typedapi/core/fieldcaps/response.go index be0cbf4539..bf1d99587a 100644 --- a/typedapi/core/fieldcaps/response.go +++ b/typedapi/core/fieldcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package fieldcaps @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package fieldcaps // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/field_caps/FieldCapabilitiesResponse.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/field_caps/FieldCapabilitiesResponse.ts#L24-L35 type Response struct { Fields map[string]map[string]types.FieldCapability `json:"fields"` Indices []string `json:"indices"` @@ -64,7 +65,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]map[string]types.FieldCapability, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "indices": @@ -73,13 +74,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } diff --git a/typedapi/core/get/get.go b/typedapi/core/get/get.go index bcd046831a..dc2ff4a042 100644 --- a/typedapi/core/get/get.go +++ b/typedapi/core/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a document. package get diff --git a/typedapi/core/get/response.go b/typedapi/core/get/response.go index 3df47d21eb..a89f9c246d 100644 --- a/typedapi/core/get/response.go +++ b/typedapi/core/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get/GetResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get/GetResponse.ts#L23-L34 type Response struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` diff --git a/typedapi/core/getscript/get_script.go b/typedapi/core/getscript/get_script.go index 2b62090cdd..7ac3050e15 100644 --- a/typedapi/core/getscript/get_script.go +++ b/typedapi/core/getscript/get_script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a script. package getscript diff --git a/typedapi/core/getscript/response.go b/typedapi/core/getscript/response.go index 47d33dfde1..026dc17a85 100644 --- a/typedapi/core/getscript/response.go +++ b/typedapi/core/getscript/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getscript @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscript // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script/GetScriptResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script/GetScriptResponse.ts#L23-L29 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/core/getscriptcontext/get_script_context.go b/typedapi/core/getscriptcontext/get_script_context.go index 8633b7a5d0..f1422345b3 100644 --- a/typedapi/core/getscriptcontext/get_script_context.go +++ b/typedapi/core/getscriptcontext/get_script_context.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns all script contexts. package getscriptcontext diff --git a/typedapi/core/getscriptcontext/response.go b/typedapi/core/getscriptcontext/response.go index 993dc2de05..30848d04b5 100644 --- a/typedapi/core/getscriptcontext/response.go +++ b/typedapi/core/getscriptcontext/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getscriptcontext @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscriptcontext // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script_context/GetScriptContextResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script_context/GetScriptContextResponse.ts#L22-L26 type Response struct { Contexts []types.GetScriptContext `json:"contexts"` } diff --git a/typedapi/core/getscriptlanguages/get_script_languages.go b/typedapi/core/getscriptlanguages/get_script_languages.go index ce71ba675b..636d90801e 100644 --- a/typedapi/core/getscriptlanguages/get_script_languages.go +++ b/typedapi/core/getscriptlanguages/get_script_languages.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns available script types, languages and contexts package getscriptlanguages diff --git a/typedapi/core/getscriptlanguages/response.go b/typedapi/core/getscriptlanguages/response.go index 5b5fa9f244..5a5dc9bd9b 100644 --- a/typedapi/core/getscriptlanguages/response.go +++ b/typedapi/core/getscriptlanguages/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getscriptlanguages @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscriptlanguages // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script_languages/GetScriptLanguagesResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script_languages/GetScriptLanguagesResponse.ts#L22-L27 type Response struct { LanguageContexts []types.LanguageContext `json:"language_contexts"` TypesAllowed []string `json:"types_allowed"` diff --git a/typedapi/core/getsource/get_source.go b/typedapi/core/getsource/get_source.go index 30491eaf8d..769c5bc42b 100644 --- a/typedapi/core/getsource/get_source.go +++ b/typedapi/core/getsource/get_source.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the source of a document. package getsource diff --git a/typedapi/core/getsource/response.go b/typedapi/core/getsource/response.go index 836bce1a36..b7d2405094 100644 --- a/typedapi/core/getsource/response.go +++ b/typedapi/core/getsource/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getsource @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsource // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_source/SourceResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_source/SourceResponse.ts#L20-L22 type Response = json.RawMessage diff --git a/typedapi/core/healthreport/health_report.go b/typedapi/core/healthreport/health_report.go index c490397717..bbec83760b 100644 --- a/typedapi/core/healthreport/health_report.go +++ b/typedapi/core/healthreport/health_report.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the health of the cluster. package healthreport diff --git a/typedapi/core/healthreport/response.go b/typedapi/core/healthreport/response.go index ce26be8fc5..f4ed3600a8 100644 --- a/typedapi/core/healthreport/response.go +++ b/typedapi/core/healthreport/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package healthreport @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package healthreport // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/Response.ts#L22-L28 type Response struct { ClusterName string `json:"cluster_name"` Indicators types.Indicators `json:"indicators"` diff --git a/typedapi/core/index/index.go b/typedapi/core/index/index.go index 5ec30a3dcc..97b52069a0 100644 --- a/typedapi/core/index/index.go +++ b/typedapi/core/index/index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a document in an index. package index diff --git a/typedapi/core/index/request.go b/typedapi/core/index/request.go index e39a8cc204..6175ad2016 100644 --- a/typedapi/core/index/request.go +++ b/typedapi/core/index/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package index @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package index // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/index/IndexRequest.ts#L35-L117 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/index/IndexRequest.ts#L35-L117 type Request = json.RawMessage diff --git a/typedapi/core/index/response.go b/typedapi/core/index/response.go index 151c331437..37ba6255e5 100644 --- a/typedapi/core/index/response.go +++ b/typedapi/core/index/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package index @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package index // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/index/IndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/index/IndexResponse.ts#L22-L24 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Id_ string `json:"_id"` diff --git a/typedapi/core/info/info.go b/typedapi/core/info/info.go index 93fd3f4733..0d648fe498 100644 --- a/typedapi/core/info/info.go +++ b/typedapi/core/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns basic information about the cluster. package info diff --git a/typedapi/core/info/response.go b/typedapi/core/info/response.go index 035c188a57..ffc4ab15ce 100644 --- a/typedapi/core/info/response.go +++ b/typedapi/core/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/info/RootNodeInfoResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/info/RootNodeInfoResponse.ts#L23-L31 type Response struct { ClusterName string `json:"cluster_name"` ClusterUuid string `json:"cluster_uuid"` diff --git a/typedapi/core/knnsearch/knn_search.go b/typedapi/core/knnsearch/knn_search.go index a371c2c8dc..3db7884899 100644 --- a/typedapi/core/knnsearch/knn_search.go +++ b/typedapi/core/knnsearch/knn_search.go @@ -16,7 +16,7 @@ // under the License. 
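Because index.Request above (like create.Request earlier in this diff) is a type alias for json.RawMessage, any pre-marshalled document can serve as the request body. A short sketch under that assumption, using json.RawMessage as a stand-in for the alias and a hypothetical document type:

package main

import (
	"encoding/json"
	"fmt"
)

// article is a hypothetical document type used only for illustration.
type article struct {
	Title string `json:"title"`
}

func main() {
	body, err := json.Marshal(article{Title: "hello"})
	if err != nil {
		panic(err)
	}
	// index.Request = json.RawMessage, so a marshalled []byte can be assigned
	// to it directly; json.RawMessage stands in for the alias here.
	var req json.RawMessage = body
	fmt.Println(string(req)) // {"title":"hello"}
}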
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Performs a kNN search. package knnsearch diff --git a/typedapi/core/knnsearch/request.go b/typedapi/core/knnsearch/request.go index ba7eda55c9..3a716b560f 100644 --- a/typedapi/core/knnsearch/request.go +++ b/typedapi/core/knnsearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package knnsearch @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package knnsearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/knn_search/KnnSearchRequest.ts#L27-L80 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/knn_search/KnnSearchRequest.ts#L27-L80 type Request struct { // DocvalueFields The request returns doc values for field names matching these patterns @@ -96,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "docvalue_fields": if err := dec.Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "fields": @@ -105,13 +105,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } @@ -121,24 +121,24 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } s.Filter = append(s.Filter, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } } case "knn": if err := dec.Decode(&s.Knn); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stored_fields": @@ -147,13 +147,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } diff --git a/typedapi/core/knnsearch/response.go b/typedapi/core/knnsearch/response.go index 58b85eeb6d..0756fddb0f 100644 --- a/typedapi/core/knnsearch/response.go +++ b/typedapi/core/knnsearch/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package knnsearch @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package knnsearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/knn_search/KnnSearchResponse.ts#L26-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/knn_search/KnnSearchResponse.ts#L26-L54 type Response struct { // Fields Contains field values for the documents. These fields diff --git a/typedapi/core/mget/mget.go b/typedapi/core/mget/mget.go index 3688d0ffbb..228dff1dc6 100644 --- a/typedapi/core/mget/mget.go +++ b/typedapi/core/mget/mget.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to get multiple documents in one request. package mget diff --git a/typedapi/core/mget/request.go b/typedapi/core/mget/request.go index 0f81205029..9868cb30e2 100644 --- a/typedapi/core/mget/request.go +++ b/typedapi/core/mget/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mget @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package mget // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mget/MultiGetRequest.ts#L25-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mget/MultiGetRequest.ts#L25-L91 type Request struct { // Docs The documents you want to retrieve. Required if no index is specified in the @@ -77,7 +77,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "docs": if err := dec.Decode(&s.Docs); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } case "ids": @@ -86,13 +86,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } s.Ids = append(s.Ids, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Ids); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } } diff --git a/typedapi/core/mget/response.go b/typedapi/core/mget/response.go index a240520d7d..8bd5283709 100644 --- a/typedapi/core/mget/response.go +++ b/typedapi/core/mget/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mget @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package mget // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mget/MultiGetResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mget/MultiGetResponse.ts#L22-L26 type Response struct { Docs []types.MgetResponseItem `json:"docs"` } @@ -59,7 +60,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "docs": messageArray := []json.RawMessage{} if err := dec.Decode(&messageArray); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } docs: for _, message := range messageArray { @@ -70,7 +71,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Docs", err) } switch t { @@ -79,7 +80,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewGetResult() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } s.Docs = append(s.Docs, o) continue docs @@ -88,7 +89,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewMultiGetError() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } s.Docs = append(s.Docs, o) continue docs diff --git a/typedapi/core/msearch/msearch.go b/typedapi/core/msearch/msearch.go index c8448774bf..e7381eea0f 100644 --- a/typedapi/core/msearch/msearch.go +++ b/typedapi/core/msearch/msearch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to execute several search operations in one request. package msearch diff --git a/typedapi/core/msearch/request.go b/typedapi/core/msearch/request.go index 62000b0d79..0163aa66a1 100644 --- a/typedapi/core/msearch/request.go +++ b/typedapi/core/msearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
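The hunk above decodes each entry of docs into either a GetResult or a MultiGetError before appending it to Docs, so consumers are expected to type-switch over the items. A self-contained sketch of that consumption pattern, with stand-in types in place of the ones from typedapi/types:

package main

import "fmt"

// Stand-ins for types.GetResult and types.MultiGetError referenced above.
type getResult struct{ Id string }
type multiGetError struct{ Reason string }

func main() {
	// Mixed slice mirroring Response.Docs ([]types.MgetResponseItem).
	docs := []any{
		&getResult{Id: "1"},
		&multiGetError{Reason: "not_found"}, // hypothetical error reason
	}

	for _, d := range docs {
		switch doc := d.(type) {
		case *getResult:
			fmt.Println("hit:", doc.Id)
		case *multiGetError:
			fmt.Println("error:", doc.Reason)
		}
	}
}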
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package msearch @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/MultiSearchRequest.ts#L25-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/MultiSearchRequest.ts#L25-L96 type Request = []types.MsearchRequestItem diff --git a/typedapi/core/msearch/response.go b/typedapi/core/msearch/response.go index 0023ebfee9..6286fd4387 100644 --- a/typedapi/core/msearch/response.go +++ b/typedapi/core/msearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package msearch @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/MultiSearchResponse.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/MultiSearchResponse.ts#L25-L27 type Response struct { Responses []types.MsearchResponseItem `json:"responses"` Took int64 `json:"took"` @@ -61,7 +62,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "responses": messageArray := []json.RawMessage{} if err := dec.Decode(&messageArray); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } responses: for _, message := range messageArray { @@ -72,7 +73,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Responses", err) } switch t { @@ -81,7 +82,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewMultiSearchItem() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } s.Responses = append(s.Responses, o) continue responses @@ -90,7 +91,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewErrorResponseBase() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } s.Responses = append(s.Responses, o) continue responses @@ -106,7 +107,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/core/msearchtemplate/msearch_template.go b/typedapi/core/msearchtemplate/msearch_template.go index 159473a23b..c946eaed99 100644 --- a/typedapi/core/msearchtemplate/msearch_template.go +++ b/typedapi/core/msearchtemplate/msearch_template.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to execute several search template operations in one request. package msearchtemplate diff --git a/typedapi/core/msearchtemplate/request.go b/typedapi/core/msearchtemplate/request.go index 7f061b456d..720ae11f52 100644 --- a/typedapi/core/msearchtemplate/request.go +++ b/typedapi/core/msearchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package msearchtemplate @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package msearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch_template/MultiSearchTemplateRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch_template/MultiSearchTemplateRequest.ts#L25-L70 type Request = []types.RequestItem diff --git a/typedapi/core/msearchtemplate/response.go b/typedapi/core/msearchtemplate/response.go index ab3336533d..e837f65433 100644 --- a/typedapi/core/msearchtemplate/response.go +++ b/typedapi/core/msearchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package msearchtemplate @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package msearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch_template/MultiSearchTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch_template/MultiSearchTemplateResponse.ts#L22-L24 type Response struct { Responses []types.MsearchResponseItem `json:"responses"` Took int64 `json:"took"` @@ -61,7 +62,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "responses": messageArray := []json.RawMessage{} if err := dec.Decode(&messageArray); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } responses: for _, message := range messageArray { @@ -72,7 +73,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Responses", err) } switch t { @@ -81,7 +82,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewMultiSearchItem() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } s.Responses = append(s.Responses, o) continue responses @@ -90,7 +91,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewErrorResponseBase() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } s.Responses = append(s.Responses, o) continue responses @@ -106,7 +107,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/core/mtermvectors/mtermvectors.go b/typedapi/core/mtermvectors/mtermvectors.go index 085ec91c77..6a1f34d61f 100644 --- a/typedapi/core/mtermvectors/mtermvectors.go +++ b/typedapi/core/mtermvectors/mtermvectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns multiple termvectors in one request. package mtermvectors diff --git a/typedapi/core/mtermvectors/request.go b/typedapi/core/mtermvectors/request.go index 70a4c6b5a4..30262f3969 100644 --- a/typedapi/core/mtermvectors/request.go +++ b/typedapi/core/mtermvectors/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mtermvectors @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package mtermvectors // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mtermvectors/MultiTermVectorsRequest.ts#L31-L109 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mtermvectors/MultiTermVectorsRequest.ts#L31-L109 type Request struct { // Docs Array of existing or artificial documents. diff --git a/typedapi/core/mtermvectors/response.go b/typedapi/core/mtermvectors/response.go index d3db64cb2b..92ef15ff43 100644 --- a/typedapi/core/mtermvectors/response.go +++ b/typedapi/core/mtermvectors/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mtermvectors @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mtermvectors // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mtermvectors/MultiTermVectorsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mtermvectors/MultiTermVectorsResponse.ts#L22-L24 type Response struct { Docs []types.TermVectorsResult `json:"docs"` } diff --git a/typedapi/core/openpointintime/open_point_in_time.go b/typedapi/core/openpointintime/open_point_in_time.go index 2add1b2b34..a05ec8a0a5 100644 --- a/typedapi/core/openpointintime/open_point_in_time.go +++ b/typedapi/core/openpointintime/open_point_in_time.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Open a point in time that can be used in subsequent searches package openpointintime diff --git a/typedapi/core/openpointintime/response.go b/typedapi/core/openpointintime/response.go index e86f86dcf0..1c12bf5b66 100644 --- a/typedapi/core/openpointintime/response.go +++ b/typedapi/core/openpointintime/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package openpointintime // Response holds the response body struct for the package openpointintime // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/open_point_in_time/OpenPointInTimeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/open_point_in_time/OpenPointInTimeResponse.ts#L22-L24 type Response struct { Id string `json:"id"` } diff --git a/typedapi/core/ping/ping.go b/typedapi/core/ping/ping.go index 894c66f583..6721bb7cb4 100644 --- a/typedapi/core/ping/ping.go +++ b/typedapi/core/ping/ping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns whether the cluster is running. package ping diff --git a/typedapi/core/putscript/put_script.go b/typedapi/core/putscript/put_script.go index 24eb3c0ad9..3528649562 100644 --- a/typedapi/core/putscript/put_script.go +++ b/typedapi/core/putscript/put_script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a script. package putscript diff --git a/typedapi/core/putscript/request.go b/typedapi/core/putscript/request.go index 4ef44db163..d7396c43fc 100644 --- a/typedapi/core/putscript/request.go +++ b/typedapi/core/putscript/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putscript @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putscript // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/put_script/PutScriptRequest.ts#L25-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/put_script/PutScriptRequest.ts#L25-L64 type Request struct { // Script Contains the script or search template, its parameters, and its language. diff --git a/typedapi/core/putscript/response.go b/typedapi/core/putscript/response.go index dfdf2aa60b..fdabbc5705 100644 --- a/typedapi/core/putscript/response.go +++ b/typedapi/core/putscript/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putscript // Response holds the response body struct for the package putscript // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/put_script/PutScriptResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/put_script/PutScriptResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/core/rankeval/rank_eval.go b/typedapi/core/rankeval/rank_eval.go index aff010176c..a60fda37b9 100644 --- a/typedapi/core/rankeval/rank_eval.go +++ b/typedapi/core/rankeval/rank_eval.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to evaluate the quality of ranked search results over a set of typical // search queries diff --git a/typedapi/core/rankeval/request.go b/typedapi/core/rankeval/request.go index 2afc8d3ce2..cefe0e1c63 100644 --- a/typedapi/core/rankeval/request.go +++ b/typedapi/core/rankeval/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rankeval @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rankeval // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/RankEvalRequest.ts#L24-L61 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/RankEvalRequest.ts#L24-L61 type Request struct { // Metric Definition of the evaluation metric to calculate. diff --git a/typedapi/core/rankeval/response.go b/typedapi/core/rankeval/response.go index 2ae1c55d2e..58c508a5d8 100644 --- a/typedapi/core/rankeval/response.go +++ b/typedapi/core/rankeval/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rankeval @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package rankeval // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/RankEvalResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/RankEvalResponse.ts#L26-L34 type Response struct { // Details The details section contains one entry for every query in the original diff --git a/typedapi/core/reindex/reindex.go b/typedapi/core/reindex/reindex.go index 36cb6f2b52..81493ad44d 100644 --- a/typedapi/core/reindex/reindex.go +++ b/typedapi/core/reindex/reindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to copy documents from one index to another, optionally filtering the // source diff --git a/typedapi/core/reindex/request.go b/typedapi/core/reindex/request.go index f1ca313953..2365c91914 100644 --- a/typedapi/core/reindex/request.go +++ b/typedapi/core/reindex/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reindex @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package reindex // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex/ReindexRequest.ts#L27-L101 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex/ReindexRequest.ts#L27-L101 type Request struct { // Conflicts Set to proceed to continue reindexing even if there are conflicts. 
@@ -84,12 +84,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "conflicts": if err := dec.Decode(&s.Conflicts); err != nil { - return err + return fmt.Errorf("%s | %w", "Conflicts", err) } case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "max_docs": @@ -99,7 +99,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocs", err) } s.MaxDocs = &value case float64: @@ -110,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -119,7 +119,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -128,7 +128,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { o := types.NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -136,7 +136,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { o := types.NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -150,7 +150,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -160,7 +160,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } } diff --git a/typedapi/core/reindex/response.go b/typedapi/core/reindex/response.go index 23d582cf5e..1c01f6bd5d 100644 --- a/typedapi/core/reindex/response.go +++ b/typedapi/core/reindex/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reindex @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reindex // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex/ReindexResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex/ReindexResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` Created *int64 `json:"created,omitempty"` diff --git a/typedapi/core/reindexrethrottle/reindex_rethrottle.go b/typedapi/core/reindexrethrottle/reindex_rethrottle.go index d8f5c79acc..e4dab406db 100644 --- a/typedapi/core/reindexrethrottle/reindex_rethrottle.go +++ b/typedapi/core/reindexrethrottle/reindex_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Changes the number of requests per second for a particular Reindex operation. package reindexrethrottle diff --git a/typedapi/core/reindexrethrottle/response.go b/typedapi/core/reindexrethrottle/response.go index 967cfac15d..e311b9094a 100644 --- a/typedapi/core/reindexrethrottle/response.go +++ b/typedapi/core/reindexrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reindexrethrottle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reindexrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex_rethrottle/ReindexRethrottleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex_rethrottle/ReindexRethrottleResponse.ts#L23-L25 type Response struct { Nodes map[string]types.ReindexNode `json:"nodes"` } diff --git a/typedapi/core/rendersearchtemplate/render_search_template.go b/typedapi/core/rendersearchtemplate/render_search_template.go index 0663d8ac90..87d41fb906 100644 --- a/typedapi/core/rendersearchtemplate/render_search_template.go +++ b/typedapi/core/rendersearchtemplate/render_search_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to use the Mustache language to pre-render a search definition. package rendersearchtemplate diff --git a/typedapi/core/rendersearchtemplate/request.go b/typedapi/core/rendersearchtemplate/request.go index 517cd78f37..d4324bd7db 100644 --- a/typedapi/core/rendersearchtemplate/request.go +++ b/typedapi/core/rendersearchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rendersearchtemplate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package rendersearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/render_search_template/RenderSearchTemplateRequest.ts#L25-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/render_search_template/RenderSearchTemplateRequest.ts#L25-L55 type Request struct { File *string `json:"file,omitempty"` // Params Key-value pairs used to replace Mustache variables in the template. 
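The dominant change in the typedapi hunks above and below is the error-wrapping pattern: every bare "return err" inside a generated UnmarshalJSON becomes return fmt.Errorf("%s | %w", "<FieldName>", err), which is also why each touched file gains an "fmt" import. The sketch below is a minimal, self-contained illustration of what that buys a caller; the doc struct and its Took field are invented for the example and are not part of the generated client.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// doc mimics the shape of a generated response struct with a custom
// UnmarshalJSON that wraps field-level decoding errors.
type doc struct {
	Took int64 `json:"took"`
}

func (d *doc) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["took"]; ok {
		if err := json.Unmarshal(msg, &d.Took); err != nil {
			// Same shape as the generated code: field name prefix + wrapped cause.
			return fmt.Errorf("%s | %w", "Took", err)
		}
	}
	return nil
}

func main() {
	var d doc
	err := json.Unmarshal([]byte(`{"took":"not-a-number"}`), &d)
	// The message now names the failing field, e.g.
	// "Took | json: cannot unmarshal string into Go value of type int64".
	fmt.Println(err)

	// Because of %w, the original error is still reachable with errors.As.
	var typeErr *json.UnmarshalTypeError
	fmt.Println(errors.As(err, &typeErr)) // true
}

Using %w rather than %v keeps errors.Is and errors.As working across the wrap, while the plain field-name prefix identifies the failing field without introducing new exported error types.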
diff --git a/typedapi/core/rendersearchtemplate/response.go b/typedapi/core/rendersearchtemplate/response.go index 6a6b2be8da..509ad5bd10 100644 --- a/typedapi/core/rendersearchtemplate/response.go +++ b/typedapi/core/rendersearchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rendersearchtemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package rendersearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/render_search_template/RenderSearchTemplateResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/render_search_template/RenderSearchTemplateResponse.ts#L23-L25 type Response struct { TemplateOutput map[string]json.RawMessage `json:"template_output"` } diff --git a/typedapi/core/scriptspainlessexecute/request.go b/typedapi/core/scriptspainlessexecute/request.go index e6d66c603b..e6748768d3 100644 --- a/typedapi/core/scriptspainlessexecute/request.go +++ b/typedapi/core/scriptspainlessexecute/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package scriptspainlessexecute @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package scriptspainlessexecute // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/scripts_painless_execute/ExecutePainlessScriptRequest.ts#L24-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/scripts_painless_execute/ExecutePainlessScriptRequest.ts#L24-L46 type Request struct { // Context The context that the script should run in. diff --git a/typedapi/core/scriptspainlessexecute/response.go b/typedapi/core/scriptspainlessexecute/response.go index 30cf2983bc..b48b68c425 100644 --- a/typedapi/core/scriptspainlessexecute/response.go +++ b/typedapi/core/scriptspainlessexecute/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package scriptspainlessexecute @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package scriptspainlessexecute // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/scripts_painless_execute/ExecutePainlessScriptResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/scripts_painless_execute/ExecutePainlessScriptResponse.ts#L20-L24 type Response struct { Result json.RawMessage `json:"result,omitempty"` } diff --git a/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go b/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go index 381ebd34a7..c99fdb646d 100644 --- a/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go +++ b/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows an arbitrary script to be executed and a result to be returned package scriptspainlessexecute diff --git a/typedapi/core/scroll/request.go b/typedapi/core/scroll/request.go index faa0cbf3f9..a7a9cb198a 100644 --- a/typedapi/core/scroll/request.go +++ b/typedapi/core/scroll/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package scroll @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package scroll // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/scroll/ScrollRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/scroll/ScrollRequest.ts#L24-L59 type Request struct { // Scroll Period to retain the search context for scrolling. @@ -75,12 +75,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "scroll": if err := dec.Decode(&s.Scroll); err != nil { - return err + return fmt.Errorf("%s | %w", "Scroll", err) } case "scroll_id": if err := dec.Decode(&s.ScrollId); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId", err) } } diff --git a/typedapi/core/scroll/response.go b/typedapi/core/scroll/response.go index e1be93493c..22bd666c9c 100644 --- a/typedapi/core/scroll/response.go +++ b/typedapi/core/scroll/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package scroll @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -33,7 +34,7 @@ import ( // Response holds the response body struct for the package scroll // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/scroll/ScrollResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/scroll/ScrollResponse.ts#L22-L24 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -100,490 +101,490 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cardinality": o := types.NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := types.NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := types.NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := types.NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := types.NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := types.NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := types.NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := 
types.NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := types.NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := types.NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := types.NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := types.NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := types.NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := types.NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := types.NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := types.NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := types.NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := types.NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := types.NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := types.NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := types.NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := types.NewStringRareTermsAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := types.NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := types.NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := types.NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := types.NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := types.NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := types.NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := types.NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := types.NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := types.NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := types.NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := types.NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := types.NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := types.NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := types.NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := types.NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := types.NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := types.NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := 
types.NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := types.NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := types.NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := types.NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := types.NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := types.NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := types.NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := types.NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := types.NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := types.NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := types.NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := types.NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := types.NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := types.NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := types.NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -593,7 +594,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -602,7 +603,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -610,12 +611,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -625,7 +626,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := types.Float64(value) s.MaxScore = &f @@ -641,7 +642,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -651,22 +652,22 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -694,28 +695,28 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "completion": o := types.NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := types.NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := types.NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -725,7 +726,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -739,7 +740,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", 
err) } s.TerminatedEarly = &value case bool: @@ -753,7 +754,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -767,7 +768,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/core/scroll/scroll.go b/typedapi/core/scroll/scroll.go index d315e65bcd..723282ef5e 100644 --- a/typedapi/core/scroll/scroll.go +++ b/typedapi/core/scroll/scroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to retrieve a large numbers of results from a single search request. package scroll diff --git a/typedapi/core/search/request.go b/typedapi/core/search/request.go index 8099ca6f08..4683575821 100644 --- a/typedapi/core/search/request.go +++ b/typedapi/core/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/SearchRequest.ts#L53-L506 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/SearchRequest.ts#L53-L506 type Request struct { // Aggregations Defines the aggregations that are run as part of the search request. 
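Several of the following search/request.go hunks (knn, rescore, sort, stored_fields) touch the generated "single value or array" decoding idiom: the raw message is checked for a leading "[" and decoded either as one element appended to the slice or as the whole slice. A minimal standalone sketch of that idiom follows; the request struct and its stored_fields-like field are illustrative, not the actual generated type.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// request stands in for a generated request struct whose field accepts
// either a single string or an array of strings in JSON.
type request struct {
	StoredFields []string
}

func (r *request) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if rawMsg, ok := raw["stored_fields"]; ok {
		if !bytes.HasPrefix(rawMsg, []byte("[")) {
			// Single value: decode one string and append it.
			var o string
			if err := json.Unmarshal(rawMsg, &o); err != nil {
				return fmt.Errorf("%s | %w", "StoredFields", err)
			}
			r.StoredFields = append(r.StoredFields, o)
		} else {
			// Array form: decode straight into the slice.
			if err := json.Unmarshal(rawMsg, &r.StoredFields); err != nil {
				return fmt.Errorf("%s | %w", "StoredFields", err)
			}
		}
	}
	return nil
}

func main() {
	var a, b request
	_ = json.Unmarshal([]byte(`{"stored_fields":"title"}`), &a)
	_ = json.Unmarshal([]byte(`{"stored_fields":["title","body"]}`), &b)
	fmt.Println(a.StoredFields, b.StoredFields) // [title] [title body]
}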
@@ -195,17 +195,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "collapse": if err := dec.Decode(&s.Collapse); err != nil { - return err + return fmt.Errorf("%s | %w", "Collapse", err) } case "docvalue_fields": if err := dec.Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "explain": @@ -215,7 +215,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -227,12 +227,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Ext = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Ext); err != nil { - return err + return fmt.Errorf("%s | %w", "Ext", err) } case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "from": @@ -243,7 +243,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -253,12 +253,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "indices_boost": if err := dec.Decode(&s.IndicesBoost); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesBoost", err) } case "knn": @@ -267,13 +267,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewKnnQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } s.Knn = append(s.Knn, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Knn); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } } @@ -284,7 +284,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinScore", err) } f := types.Float64(value) s.MinScore = &f @@ -295,12 +295,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "pit": if err := dec.Decode(&s.Pit); err != nil { - return err + return fmt.Errorf("%s | %w", "Pit", err) } case "post_filter": if err := dec.Decode(&s.PostFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "PostFilter", err) } case "profile": @@ -310,7 +310,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -319,12 +319,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "rank": if err := dec.Decode(&s.Rank); err != nil { - return err + return fmt.Errorf("%s | %w", "Rank", err) } case "rescore": @@ -333,19 +333,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewRescore() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Rescore", err) } s.Rescore = append(s.Rescore, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Rescore); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -353,12 +353,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]types.ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "search_after": if err := dec.Decode(&s.SearchAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } case "seq_no_primary_term": @@ -368,7 +368,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoPrimaryTerm", err) } s.SeqNoPrimaryTerm = &value case bool: @@ -383,7 +383,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -393,7 +393,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "slice": if err := dec.Decode(&s.Slice); err != nil { - return err + return fmt.Errorf("%s | %w", "Slice", err) } case "sort": @@ -402,24 +402,24 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "stored_fields": @@ -428,19 +428,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } case "suggest": if err := dec.Decode(&s.Suggest); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } case "terminate_after": @@ -450,7 +450,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminateAfter", err) } s.TerminateAfter = &value case float64: @@ -461,7 +461,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -477,7 +477,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | 
%w", "TrackScores", err) } s.TrackScores = &value case bool: @@ -486,7 +486,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "track_total_hits": if err := dec.Decode(&s.TrackTotalHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackTotalHits", err) } case "version": @@ -496,7 +496,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } s.Version = &value case bool: diff --git a/typedapi/core/search/response.go b/typedapi/core/search/response.go index 5ae19d6dcd..822b56f7b3 100644 --- a/typedapi/core/search/response.go +++ b/typedapi/core/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -33,7 +34,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/SearchResponse.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/SearchResponse.ts#L34-L36 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -100,490 +101,490 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cardinality": o := types.NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := types.NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := types.NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := types.NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := types.NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := types.NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := types.NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := types.NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := types.NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := types.NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := types.NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := types.NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := types.NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := types.NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := types.NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := types.NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := types.NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := 
types.NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := types.NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := types.NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := types.NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := types.NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := types.NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := types.NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := types.NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := types.NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := types.NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := types.NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := types.NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := types.NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := types.NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := types.NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := types.NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := types.NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := types.NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "range": o := types.NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := types.NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := types.NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := types.NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := types.NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := types.NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := types.NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := types.NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := types.NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := types.NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := types.NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := types.NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := types.NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := types.NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := types.NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := types.NewTTestAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := types.NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := types.NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := types.NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -593,7 +594,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -602,7 +603,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -610,12 +611,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -625,7 +626,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := types.Float64(value) s.MaxScore = &f @@ -641,7 +642,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -651,22 +652,22 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -694,28 +695,28 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "completion": o := types.NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := types.NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := types.NewTermSuggest() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -725,7 +726,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -739,7 +740,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -753,7 +754,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -767,7 +768,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/core/search/search.go b/typedapi/core/search/search.go index 674b89e8ac..88b54be1c1 100644 --- a/typedapi/core/search/search.go +++ b/typedapi/core/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns results matching a query. package search diff --git a/typedapi/core/searchmvt/request.go b/typedapi/core/searchmvt/request.go index c362efaca4..6a1c975f2f 100644 --- a/typedapi/core/searchmvt/request.go +++ b/typedapi/core/searchmvt/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package searchmvt @@ -35,7 +35,7 @@ import ( // Request holds the request body struct for the package searchmvt // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_mvt/SearchMvtRequest.ts#L33-L188 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_mvt/SearchMvtRequest.ts#L33-L188 type Request struct { // Aggs Sub-aggregations for the geotile_grid. 
@@ -143,7 +143,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggs = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggs); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggs", err) } case "buffer": @@ -154,7 +154,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Buffer", err) } s.Buffer = &value case float64: @@ -169,7 +169,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExactBounds", err) } s.ExactBounds = &value case bool: @@ -184,7 +184,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Extent", err) } s.Extent = &value case float64: @@ -198,19 +198,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } case "grid_agg": if err := dec.Decode(&s.GridAgg); err != nil { - return err + return fmt.Errorf("%s | %w", "GridAgg", err) } case "grid_precision": @@ -221,7 +221,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "GridPrecision", err) } s.GridPrecision = &value case float64: @@ -231,17 +231,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "grid_type": if err := dec.Decode(&s.GridType); err != nil { - return err + return fmt.Errorf("%s | %w", "GridType", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "size": @@ -252,7 +252,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -266,19 +266,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "track_total_hits": if err := dec.Decode(&s.TrackTotalHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackTotalHits", err) } case "with_labels": @@ -288,7 +288,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WithLabels", err) } s.WithLabels = &value case bool: diff --git a/typedapi/core/searchmvt/response.go b/typedapi/core/searchmvt/response.go index 84a0a91753..4da925d188 100644 --- a/typedapi/core/searchmvt/response.go +++ 
b/typedapi/core/searchmvt/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package searchmvt // Response holds the response body struct for the package searchmvt // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_mvt/SearchMvtResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_mvt/SearchMvtResponse.ts#L22-L24 type Response = []byte diff --git a/typedapi/core/searchmvt/search_mvt.go b/typedapi/core/searchmvt/search_mvt.go index 0fe65f8d8a..53c3f13c8e 100644 --- a/typedapi/core/searchmvt/search_mvt.go +++ b/typedapi/core/searchmvt/search_mvt.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Searches a vector tile for geospatial values. Returns results as a binary // Mapbox vector tile. diff --git a/typedapi/core/searchshards/response.go b/typedapi/core/searchshards/response.go index 46d77e161a..8573312cf4 100644 --- a/typedapi/core/searchshards/response.go +++ b/typedapi/core/searchshards/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package searchshards @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package searchshards // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_shards/SearchShardsResponse.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_shards/SearchShardsResponse.ts#L25-L31 type Response struct { Indices map[string]types.ShardStoreIndex `json:"indices"` Nodes map[string]types.NodeAttributes `json:"nodes"` diff --git a/typedapi/core/searchshards/search_shards.go b/typedapi/core/searchshards/search_shards.go index 6ce85ceddb..2251956bfa 100644 --- a/typedapi/core/searchshards/search_shards.go +++ b/typedapi/core/searchshards/search_shards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about the indices and shards that a search request would // be executed against. diff --git a/typedapi/core/searchtemplate/request.go b/typedapi/core/searchtemplate/request.go index a15c98460f..672d693578 100644 --- a/typedapi/core/searchtemplate/request.go +++ b/typedapi/core/searchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package searchtemplate @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package searchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_template/SearchTemplateRequest.ts#L32-L134 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_template/SearchTemplateRequest.ts#L32-L134 type Request struct { // Explain If `true`, returns detailed information about score calculation as part of @@ -93,7 +93,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -102,7 +102,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "params": @@ -110,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "profile": @@ -120,7 +120,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -130,7 +130,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/core/searchtemplate/response.go b/typedapi/core/searchtemplate/response.go index ec22a882c0..0a334c231b 100644 --- a/typedapi/core/searchtemplate/response.go +++ b/typedapi/core/searchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package searchtemplate @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -33,7 +34,7 @@ import ( // Response holds the response body struct for the package searchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_template/SearchTemplateResponse.ts#L30-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_template/SearchTemplateResponse.ts#L30-L48 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -100,490 +101,490 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cardinality": o := types.NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := types.NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := types.NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := types.NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := types.NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := types.NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := types.NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
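Aside on the pattern applied throughout these generated UnmarshalJSON hunks: every bare "return err" becomes fmt.Errorf("%s | %w", "<FieldName>", err), so the error message names the struct field that failed to decode while the %w verb keeps the original error reachable via errors.Is and errors.As. A minimal, self-contained sketch of that convention follows; the shape type and its size field are hypothetical stand-ins used only to illustrate the wrapping, not part of the generated client.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

// shape is a hypothetical stand-in for one of the generated request/response structs.
type shape struct {
	Size *int
}

func (s *shape) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["size"]; ok {
		var v string
		if err := json.Unmarshal(msg, &v); err != nil {
			return fmt.Errorf("%s | %w", "Size", err)
		}
		n, err := strconv.Atoi(v)
		if err != nil {
			// Same convention as the generated code: name the field, wrap the cause.
			return fmt.Errorf("%s | %w", "Size", err)
		}
		s.Size = &n
	}
	return nil
}

func main() {
	var s shape
	err := s.UnmarshalJSON([]byte(`{"size":"not-a-number"}`))
	fmt.Println(err) // Size | strconv.Atoi: parsing "not-a-number": invalid syntax

	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true: the wrapped cause is still reachable
}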
case "simple_value": o := types.NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := types.NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := types.NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := types.NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := types.NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := types.NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := types.NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := types.NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := types.NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := types.NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := types.NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := types.NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := types.NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := types.NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := 
types.NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := types.NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := types.NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := types.NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := types.NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := types.NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := types.NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := types.NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := types.NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := types.NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := types.NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := types.NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := types.NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := types.NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := types.NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := types.NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := types.NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := types.NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "ip_prefix": o := types.NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := types.NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := types.NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := types.NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := types.NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := types.NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := types.NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := types.NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := types.NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := types.NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := types.NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := types.NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := types.NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := types.NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := types.NewGeoLineAggregate() if err 
:= dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -593,7 +594,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -602,7 +603,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -610,12 +611,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -625,7 +626,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := types.Float64(value) s.MaxScore = &f @@ -641,7 +642,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -651,22 +652,22 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -694,28 +695,28 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "completion": o := types.NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := types.NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := types.NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -725,7 +726,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -739,7 +740,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + 
return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -753,7 +754,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -767,7 +768,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/core/searchtemplate/search_template.go b/typedapi/core/searchtemplate/search_template.go index f242d0349b..44526d5765 100644 --- a/typedapi/core/searchtemplate/search_template.go +++ b/typedapi/core/searchtemplate/search_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to use the Mustache language to pre-render a search definition. package searchtemplate diff --git a/typedapi/core/termsenum/request.go b/typedapi/core/termsenum/request.go index 539a978e5d..9f4f6d3cba 100644 --- a/typedapi/core/termsenum/request.go +++ b/typedapi/core/termsenum/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package termsenum @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package termsenum // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/terms_enum/TermsEnumRequest.ts#L26-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/terms_enum/TermsEnumRequest.ts#L26-L65 type Request struct { // CaseInsensitive When true the provided search string is matched against index terms without @@ -96,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseInsensitive", err) } s.CaseInsensitive = &value case bool: @@ -105,18 +105,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "index_filter": if err := dec.Decode(&s.IndexFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexFilter", err) } case "search_after": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -133,7 +133,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -144,7 +144,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "String", err) } o := string(tmp[:]) o, err = 
strconv.Unquote(o) @@ -155,7 +155,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/core/termsenum/response.go b/typedapi/core/termsenum/response.go index 43da8d6969..57568614b0 100644 --- a/typedapi/core/termsenum/response.go +++ b/typedapi/core/termsenum/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package termsenum @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package termsenum // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/terms_enum/TermsEnumResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/terms_enum/TermsEnumResponse.ts#L22-L28 type Response struct { Complete bool `json:"complete"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/core/termsenum/terms_enum.go b/typedapi/core/termsenum/terms_enum.go index 325c0bee87..87630550a0 100644 --- a/typedapi/core/termsenum/terms_enum.go +++ b/typedapi/core/termsenum/terms_enum.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // The terms enum API can be used to discover terms in the index that begin // with the provided string. It is designed for low-latency look-ups used in diff --git a/typedapi/core/termvectors/request.go b/typedapi/core/termvectors/request.go index 9525c7a8a4..59fb93a632 100644 --- a/typedapi/core/termvectors/request.go +++ b/typedapi/core/termvectors/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package termvectors @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package termvectors // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/TermVectorsRequest.ts#L33-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/TermVectorsRequest.ts#L33-L118 type Request struct { // Doc An artificial document (a document not present in the index) for which you diff --git a/typedapi/core/termvectors/response.go b/typedapi/core/termvectors/response.go index 39fe3de625..c87948ae30 100644 --- a/typedapi/core/termvectors/response.go +++ b/typedapi/core/termvectors/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package termvectors @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package termvectors // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/TermVectorsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/TermVectorsResponse.ts#L25-L34 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/core/termvectors/termvectors.go b/typedapi/core/termvectors/termvectors.go index aac93366b5..252d2b1cda 100644 --- a/typedapi/core/termvectors/termvectors.go +++ b/typedapi/core/termvectors/termvectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information and statistics about terms in the fields of a particular // document. diff --git a/typedapi/core/update/request.go b/typedapi/core/update/request.go index efd4e8921f..c88337d200 100644 --- a/typedapi/core/update/request.go +++ b/typedapi/core/update/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package update @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package update // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/update/UpdateRequest.ts#L38-L151 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/update/UpdateRequest.ts#L38-L151 type Request struct { // DetectNoop Set to false to disable setting 'result' in the response @@ -96,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DetectNoop", err) } s.DetectNoop = &value case bool: @@ -105,7 +105,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "doc": if err := dec.Decode(&s.Doc); err != nil { - return err + return fmt.Errorf("%s | %w", "Doc", err) } case "doc_as_upsert": @@ -115,7 +115,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocAsUpsert", err) } s.DocAsUpsert = &value case bool: @@ -125,7 +125,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -134,7 +134,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { 
@@ -143,7 +143,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { o := types.NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -151,7 +151,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { o := types.NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -165,7 +165,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptedUpsert", err) } s.ScriptedUpsert = &value case bool: @@ -174,12 +174,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "upsert": if err := dec.Decode(&s.Upsert); err != nil { - return err + return fmt.Errorf("%s | %w", "Upsert", err) } } diff --git a/typedapi/core/update/response.go b/typedapi/core/update/response.go index e83532bcd1..8a348bc0eb 100644 --- a/typedapi/core/update/response.go +++ b/typedapi/core/update/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package update @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package update // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/update/UpdateResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/update/UpdateResponse.ts#L27-L29 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Get *types.InlineGet `json:"get,omitempty"` diff --git a/typedapi/core/update/update.go b/typedapi/core/update/update.go index 15a049ce36..cd37aab172 100644 --- a/typedapi/core/update/update.go +++ b/typedapi/core/update/update.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates a document with a script or partial document. package update diff --git a/typedapi/core/updatebyquery/request.go b/typedapi/core/updatebyquery/request.go index 2002c0630c..2f6d9bc929 100644 --- a/typedapi/core/updatebyquery/request.go +++ b/typedapi/core/updatebyquery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatebyquery @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package updatebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/update_by_query/UpdateByQueryRequest.ts#L37-L221 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/update_by_query/UpdateByQueryRequest.ts#L37-L221 type Request struct { // Conflicts What to do if update by query hits version conflicts: `abort` or `proceed`. @@ -84,7 +84,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "conflicts": if err := dec.Decode(&s.Conflicts); err != nil { - return err + return fmt.Errorf("%s | %w", "Conflicts", err) } case "max_docs": @@ -94,7 +94,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocs", err) } s.MaxDocs = &value case float64: @@ -104,13 +104,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -119,7 +119,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -128,7 +128,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { o := types.NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -136,7 +136,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { o := types.NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -145,7 +145,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "slice": if err := dec.Decode(&s.Slice); err != nil { - return err + return fmt.Errorf("%s | %w", "Slice", err) } } diff --git a/typedapi/core/updatebyquery/response.go b/typedapi/core/updatebyquery/response.go index 0a98793da9..e386e2e46e 100644 --- a/typedapi/core/updatebyquery/response.go +++ b/typedapi/core/updatebyquery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatebyquery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/update_by_query/UpdateByQueryResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/update_by_query/UpdateByQueryResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` Deleted *int64 `json:"deleted,omitempty"` diff --git a/typedapi/core/updatebyquery/update_by_query.go b/typedapi/core/updatebyquery/update_by_query.go index 96048bb555..ecc797cca4 100644 --- a/typedapi/core/updatebyquery/update_by_query.go +++ b/typedapi/core/updatebyquery/update_by_query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates documents that match the specified query. If no query is specified, // diff --git a/typedapi/core/updatebyqueryrethrottle/response.go b/typedapi/core/updatebyqueryrethrottle/response.go index 1068036ceb..1e19dc63a8 100644 --- a/typedapi/core/updatebyqueryrethrottle/response.go +++ b/typedapi/core/updatebyqueryrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatebyqueryrethrottle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatebyqueryrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleResponse.ts#L23-L25 type Response struct { Nodes map[string]types.UpdateByQueryRethrottleNode `json:"nodes"` } diff --git a/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go b/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go index f1c0eb6cfb..7ff5331e46 100644 --- a/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go +++ b/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Changes the number of requests per second for a particular Update By Query // operation. 
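Because the wrapping uses %w rather than plain string formatting, the original error stays reachable through errors.Is and errors.As. A hedged sketch against the update-by-query decoder shown above; the body and the "ten" value are invented for illustration.

package main

import (
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/updatebyquery"
)

func main() {
	body := []byte(`{"max_docs":"ten"}`) // not numeric, so ParseInt fails

	var req updatebyquery.Request
	err := req.UnmarshalJSON(body)

	// The "MaxDocs | ..." prefix identifies the field, while errors.As still
	// recovers the underlying *strconv.NumError thanks to %w.
	var numErr *strconv.NumError
	if errors.As(err, &numErr) {
		fmt.Println("decode failed:", err)
	}
}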
diff --git a/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go b/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go index 7b9cff545a..4c2f0d9677 100644 --- a/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go +++ b/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes the specified dangling index package deletedanglingindex diff --git a/typedapi/danglingindices/deletedanglingindex/response.go b/typedapi/danglingindices/deletedanglingindex/response.go index d9dd977407..9115ec7cef 100644 --- a/typedapi/danglingindices/deletedanglingindex/response.go +++ b/typedapi/danglingindices/deletedanglingindex/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletedanglingindex // Response holds the response body struct for the package deletedanglingindex // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/dangling_indices/delete_dangling_index/DeleteDanglingIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/dangling_indices/delete_dangling_index/DeleteDanglingIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/danglingindices/importdanglingindex/import_dangling_index.go b/typedapi/danglingindices/importdanglingindex/import_dangling_index.go index 52b7e36d46..0cf31f9af2 100644 --- a/typedapi/danglingindices/importdanglingindex/import_dangling_index.go +++ b/typedapi/danglingindices/importdanglingindex/import_dangling_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Imports the specified dangling index package importdanglingindex diff --git a/typedapi/danglingindices/importdanglingindex/response.go b/typedapi/danglingindices/importdanglingindex/response.go index 2ae8200c8d..dd9fdf88e2 100644 --- a/typedapi/danglingindices/importdanglingindex/response.go +++ b/typedapi/danglingindices/importdanglingindex/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package importdanglingindex // Response holds the response body struct for the package importdanglingindex // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/dangling_indices/import_dangling_index/ImportDanglingIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/dangling_indices/import_dangling_index/ImportDanglingIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go b/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go index aa8c6db8cc..91cbce95a5 100644 --- a/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go +++ b/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns all dangling indices. package listdanglingindices diff --git a/typedapi/danglingindices/listdanglingindices/response.go b/typedapi/danglingindices/listdanglingindices/response.go index 130fc10bca..780fedd5f5 100644 --- a/typedapi/danglingindices/listdanglingindices/response.go +++ b/typedapi/danglingindices/listdanglingindices/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package listdanglingindices @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package listdanglingindices // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L23-L27 type Response struct { DanglingIndices []types.DanglingIndex `json:"dangling_indices"` } diff --git a/typedapi/enrich/deletepolicy/delete_policy.go b/typedapi/enrich/deletepolicy/delete_policy.go index d7e00ed291..558d505cb1 100644 --- a/typedapi/enrich/deletepolicy/delete_policy.go +++ b/typedapi/enrich/deletepolicy/delete_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing enrich policy and its enrich index. 
package deletepolicy diff --git a/typedapi/enrich/deletepolicy/response.go b/typedapi/enrich/deletepolicy/response.go index 39e95fb0d6..6ca93dfecf 100644 --- a/typedapi/enrich/deletepolicy/response.go +++ b/typedapi/enrich/deletepolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletepolicy // Response holds the response body struct for the package deletepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/delete_policy/DeleteEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/delete_policy/DeleteEnrichPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/enrich/executepolicy/execute_policy.go b/typedapi/enrich/executepolicy/execute_policy.go index e38e8cdbd7..ff914708d2 100644 --- a/typedapi/enrich/executepolicy/execute_policy.go +++ b/typedapi/enrich/executepolicy/execute_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates the enrich index for an existing enrich policy. package executepolicy diff --git a/typedapi/enrich/executepolicy/response.go b/typedapi/enrich/executepolicy/response.go index dca2c7cbb0..5d0fc1e704 100644 --- a/typedapi/enrich/executepolicy/response.go +++ b/typedapi/enrich/executepolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package executepolicy @@ -26,10 +26,10 @@ import ( // Response holds the response body struct for the package executepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/execute_policy/ExecuteEnrichPolicyResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/execute_policy/ExecuteEnrichPolicyResponse.ts#L23-L28 type Response struct { - Status types.ExecuteEnrichPolicyStatus `json:"status"` - TaskId types.TaskId `json:"task_id,omitempty"` + Status *types.ExecuteEnrichPolicyStatus `json:"status,omitempty"` + TaskId types.TaskId `json:"task_id,omitempty"` } // NewResponse returns a Response diff --git a/typedapi/enrich/getpolicy/get_policy.go b/typedapi/enrich/getpolicy/get_policy.go index c3400c8f0a..bcbdf65296 100644 --- a/typedapi/enrich/getpolicy/get_policy.go +++ b/typedapi/enrich/getpolicy/get_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets information about an enrich policy. package getpolicy diff --git a/typedapi/enrich/getpolicy/response.go b/typedapi/enrich/getpolicy/response.go index 3de32a4609..69adc52a18 100644 --- a/typedapi/enrich/getpolicy/response.go +++ b/typedapi/enrich/getpolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getpolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/get_policy/GetEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/get_policy/GetEnrichPolicyResponse.ts#L22-L24 type Response struct { Policies []types.Summary `json:"policies"` } diff --git a/typedapi/enrich/putpolicy/put_policy.go b/typedapi/enrich/putpolicy/put_policy.go index d081f43752..50701d39f3 100644 --- a/typedapi/enrich/putpolicy/put_policy.go +++ b/typedapi/enrich/putpolicy/put_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new enrich policy. package putpolicy diff --git a/typedapi/enrich/putpolicy/request.go b/typedapi/enrich/putpolicy/request.go index 7010e8f405..e397a917d1 100644 --- a/typedapi/enrich/putpolicy/request.go +++ b/typedapi/enrich/putpolicy/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putpolicy @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/put_policy/PutEnrichPolicyRequest.ts#L24-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/put_policy/PutEnrichPolicyRequest.ts#L24-L52 type Request struct { // GeoMatch Matches enrich data to incoming documents based on a `geo_shape` query. diff --git a/typedapi/enrich/putpolicy/response.go b/typedapi/enrich/putpolicy/response.go index 8a3403db2a..9499a2a968 100644 --- a/typedapi/enrich/putpolicy/response.go +++ b/typedapi/enrich/putpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putpolicy // Response holds the response body struct for the package putpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/put_policy/PutEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/put_policy/PutEnrichPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/enrich/stats/response.go b/typedapi/enrich/stats/response.go index ffce76c021..74f3d2a912 100644 --- a/typedapi/enrich/stats/response.go +++ b/typedapi/enrich/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/stats/EnrichStatsResponse.ts#L22-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/stats/EnrichStatsResponse.ts#L22-L39 type Response struct { // CacheStats Objects containing information about the enrich cache stats on each ingest diff --git a/typedapi/enrich/stats/stats.go b/typedapi/enrich/stats/stats.go index 031d981d1b..39fe1c27a9 100644 --- a/typedapi/enrich/stats/stats.go +++ b/typedapi/enrich/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets enrich coordinator statistics and information about enrich policies that // are currently executing. diff --git a/typedapi/eql/delete/delete.go b/typedapi/eql/delete/delete.go index 315865b40c..1aa2dce13a 100644 --- a/typedapi/eql/delete/delete.go +++ b/typedapi/eql/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an async EQL search by ID. If the search is still running, the search // request will be cancelled. Otherwise, the saved search results are deleted. diff --git a/typedapi/eql/delete/response.go b/typedapi/eql/delete/response.go index 910829ce09..e42a9aa0f6 100644 --- a/typedapi/eql/delete/response.go +++ b/typedapi/eql/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/delete/EqlDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/delete/EqlDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/eql/get/get.go b/typedapi/eql/get/get.go index 27e67593d0..b446208ec4 100644 --- a/typedapi/eql/get/get.go +++ b/typedapi/eql/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns async results from previously executed Event Query Language (EQL) // search diff --git a/typedapi/eql/get/response.go b/typedapi/eql/get/response.go index 72bddaa86e..f4c77beef3 100644 --- a/typedapi/eql/get/response.go +++ b/typedapi/eql/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/get/EqlGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/get/EqlGetResponse.ts#L22-L24 type Response struct { // Hits Contains matching events and sequences. Also contains related metadata. diff --git a/typedapi/eql/getstatus/get_status.go b/typedapi/eql/getstatus/get_status.go index 3c42cd5105..fdb2557605 100644 --- a/typedapi/eql/getstatus/get_status.go +++ b/typedapi/eql/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the status of a previously submitted async or stored Event Query // Language (EQL) search diff --git a/typedapi/eql/getstatus/response.go b/typedapi/eql/getstatus/response.go index 8559acc1c4..c172f74e83 100644 --- a/typedapi/eql/getstatus/response.go +++ b/typedapi/eql/getstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getstatus // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/get_status/EqlGetStatusResponse.ts#L24-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/get_status/EqlGetStatusResponse.ts#L24-L51 type Response struct { // CompletionStatus For a completed search shows the http status code of the completed search. diff --git a/typedapi/eql/search/request.go b/typedapi/eql/search/request.go index 0691f8ff32..4fd3cc1325 100644 --- a/typedapi/eql/search/request.go +++ b/typedapi/eql/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/search/EqlSearchRequest.ts#L28-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/search/EqlSearchRequest.ts#L28-L118 type Request struct { CaseSensitive *bool `json:"case_sensitive,omitempty"` // EventCategoryField Field containing the event classification, such as process, file, or network. 
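A side note on the decoder touched in the hunks below: the "filter" key of an EQL search request accepts either a single query object or an array, which is what the bytes.HasPrefix branch handles, and decode failures there now carry the "Filter | ..." prefix. A small sketch under that assumption; the query string and the exists filter are invented for illustration.

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/eql/search"
)

func main() {
	// The same request expressed with a single filter object and with an array.
	single := []byte(`{"query":"process where true","filter":{"exists":{"field":"host.name"}}}`)
	list := []byte(`{"query":"process where true","filter":[{"exists":{"field":"host.name"}}]}`)

	for _, body := range [][]byte{single, list} {
		var req search.Request
		if err := req.UnmarshalJSON(body); err != nil {
			fmt.Println("decode failed:", err)
			continue
		}
		fmt.Println("filters decoded:", len(req.Filter))
	}
}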
@@ -102,7 +102,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseSensitive", err) } s.CaseSensitive = &value case bool: @@ -111,12 +111,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "event_category_field": if err := dec.Decode(&s.EventCategoryField); err != nil { - return err + return fmt.Errorf("%s | %w", "EventCategoryField", err) } case "fetch_size": if err := dec.Decode(&s.FetchSize); err != nil { - return err + return fmt.Errorf("%s | %w", "FetchSize", err) } case "fields": @@ -125,13 +125,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewFieldAndFormat() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } @@ -141,19 +141,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } s.Filter = append(s.Filter, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } } case "keep_alive": if err := dec.Decode(&s.KeepAlive); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepAlive", err) } case "keep_on_completion": @@ -163,7 +163,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "KeepOnCompletion", err) } s.KeepOnCompletion = &value case bool: @@ -173,7 +173,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -184,32 +184,32 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "result_position": if err := dec.Decode(&s.ResultPosition); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultPosition", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "tiebreaker_field": if err := dec.Decode(&s.TiebreakerField); err != nil { - return err + return fmt.Errorf("%s | %w", "TiebreakerField", err) } case "timestamp_field": if err := dec.Decode(&s.TimestampField); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampField", err) } case "wait_for_completion_timeout": if err := dec.Decode(&s.WaitForCompletionTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "WaitForCompletionTimeout", err) } } diff --git a/typedapi/eql/search/response.go b/typedapi/eql/search/response.go index 17c8acc13e..baad6041db 100644 --- a/typedapi/eql/search/response.go +++ b/typedapi/eql/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/search/EqlSearchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/search/EqlSearchResponse.ts#L22-L24 type Response struct { // Hits Contains matching events and sequences. Also contains related metadata. diff --git a/typedapi/eql/search/search.go b/typedapi/eql/search/search.go index 8334a75df1..c61d87bd4c 100644 --- a/typedapi/eql/search/search.go +++ b/typedapi/eql/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns results matching a query expressed in Event Query Language (EQL) package search diff --git a/typedapi/esql/query/query.go b/typedapi/esql/query/query.go index 1df9943c89..55986e9d2a 100644 --- a/typedapi/esql/query/query.go +++ b/typedapi/esql/query/query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Executes an ESQL request package query diff --git a/typedapi/esql/query/request.go b/typedapi/esql/query/request.go index be462302bf..890edbe4df 100644 --- a/typedapi/esql/query/request.go +++ b/typedapi/esql/query/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package query @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/esql/query/QueryRequest.ts#L24-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/esql/query/QueryRequest.ts#L24-L64 type Request struct { // Columnar By default, ES|QL returns results as rows. For example, FROM returns each diff --git a/typedapi/esql/query/response.go b/typedapi/esql/query/response.go index 4453121bee..43469af553 100644 --- a/typedapi/esql/query/response.go +++ b/typedapi/esql/query/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package query // Response holds the response body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/esql/query/QueryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/esql/query/QueryResponse.ts#L22-L24 type Response = []byte diff --git a/typedapi/features/getfeatures/get_features.go b/typedapi/features/getfeatures/get_features.go index 6ee4b0a8ac..3d60cabf1d 100644 --- a/typedapi/features/getfeatures/get_features.go +++ b/typedapi/features/getfeatures/get_features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets a list of features which can be included in snapshots using the // feature_states field when creating a snapshot diff --git a/typedapi/features/getfeatures/response.go b/typedapi/features/getfeatures/response.go index 425c1f598c..1e94ca0714 100644 --- a/typedapi/features/getfeatures/response.go +++ b/typedapi/features/getfeatures/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getfeatures @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfeatures // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/features/get_features/GetFeaturesResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/features/get_features/GetFeaturesResponse.ts#L22-L26 type Response struct { Features []types.Feature `json:"features"` } diff --git a/typedapi/features/resetfeatures/reset_features.go b/typedapi/features/resetfeatures/reset_features.go index b0da1bfb5e..dd6c384d19 100644 --- a/typedapi/features/resetfeatures/reset_features.go +++ b/typedapi/features/resetfeatures/reset_features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Resets the internal state of features, usually by deleting system indices package resetfeatures diff --git a/typedapi/features/resetfeatures/response.go b/typedapi/features/resetfeatures/response.go index daf1bf440b..aa1eb10a76 100644 --- a/typedapi/features/resetfeatures/response.go +++ b/typedapi/features/resetfeatures/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resetfeatures @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resetfeatures // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/features/reset_features/ResetFeaturesResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/features/reset_features/ResetFeaturesResponse.ts#L22-L26 type Response struct { Features []types.Feature `json:"features"` } diff --git a/typedapi/fleet/globalcheckpoints/global_checkpoints.go b/typedapi/fleet/globalcheckpoints/global_checkpoints.go index dfc5631653..df9e60c9f6 100644 --- a/typedapi/fleet/globalcheckpoints/global_checkpoints.go +++ b/typedapi/fleet/globalcheckpoints/global_checkpoints.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the current global checkpoints for an index. This API is design for // internal use by the fleet server project. diff --git a/typedapi/fleet/globalcheckpoints/response.go b/typedapi/fleet/globalcheckpoints/response.go index 6b2a982767..b4d47e07a1 100644 --- a/typedapi/fleet/globalcheckpoints/response.go +++ b/typedapi/fleet/globalcheckpoints/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package globalcheckpoints // Response holds the response body struct for the package globalcheckpoints // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/fleet/global_checkpoints/GlobalCheckpointsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/fleet/global_checkpoints/GlobalCheckpointsResponse.ts#L22-L27 type Response struct { GlobalCheckpoints []int64 `json:"global_checkpoints"` TimedOut bool `json:"timed_out"` diff --git a/typedapi/fleet/msearch/msearch.go b/typedapi/fleet/msearch/msearch.go index efd2e2ebf8..337203f408 100644 --- a/typedapi/fleet/msearch/msearch.go +++ b/typedapi/fleet/msearch/msearch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Multi Search API where the search will only be executed after specified // checkpoints are available due to a refresh. This API is designed for internal diff --git a/typedapi/fleet/msearch/request.go b/typedapi/fleet/msearch/request.go index f5255e5362..5b52795898 100644 --- a/typedapi/fleet/msearch/request.go +++ b/typedapi/fleet/msearch/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package msearch @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/fleet/msearch/MultiSearchRequest.ts#L32-L115 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/fleet/msearch/MultiSearchRequest.ts#L32-L115 type Request = []types.MsearchRequestItem diff --git a/typedapi/fleet/msearch/response.go b/typedapi/fleet/msearch/response.go index febb6b7e7a..17da93d2e3 100644 --- a/typedapi/fleet/msearch/response.go +++ b/typedapi/fleet/msearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package msearch @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/fleet/msearch/MultiSearchResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/fleet/msearch/MultiSearchResponse.ts#L25-L29 type Response struct { Docs []types.MsearchResponseItem `json:"docs"` } @@ -59,7 +60,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "docs": messageArray := []json.RawMessage{} if err := dec.Decode(&messageArray); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } docs: for _, message := range messageArray { @@ -70,7 +71,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Docs", err) } switch t { @@ -79,7 +80,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewMultiSearchItem() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } s.Docs = append(s.Docs, o) continue docs @@ -88,7 +89,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := types.NewErrorResponseBase() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } s.Docs = append(s.Docs, o) continue docs diff --git a/typedapi/fleet/postsecret/post_secret.go b/typedapi/fleet/postsecret/post_secret.go index d08ea44cbe..cfcf657b25 100644 --- a/typedapi/fleet/postsecret/post_secret.go +++ b/typedapi/fleet/postsecret/post_secret.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a secret stored by Fleet. package postsecret diff --git a/typedapi/fleet/search/request.go b/typedapi/fleet/search/request.go index e6aa6ec05a..a7ecc4e924 100644 --- a/typedapi/fleet/search/request.go +++ b/typedapi/fleet/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/fleet/search/SearchRequest.ts#L55-L260 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/fleet/search/SearchRequest.ts#L55-L260 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -163,17 +163,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "collapse": if err := dec.Decode(&s.Collapse); err != nil { - return err + return fmt.Errorf("%s | %w", "Collapse", err) } case "docvalue_fields": if err := dec.Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "explain": @@ -183,7 +183,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -195,12 +195,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Ext = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Ext); err != nil { - return err + return fmt.Errorf("%s | %w", "Ext", err) } case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "from": @@ -211,7 +211,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -221,12 +221,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "indices_boost": if err := dec.Decode(&s.IndicesBoost); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesBoost", err) } case "min_score": @@ -236,7 +236,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinScore", err) } f := types.Float64(value) s.MinScore = &f @@ -247,12 +247,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "pit": if err := dec.Decode(&s.Pit); err != nil { - return err + return fmt.Errorf("%s | %w", "Pit", err) } case "post_filter": if err := 
dec.Decode(&s.PostFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "PostFilter", err) } case "profile": @@ -262,7 +262,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -271,7 +271,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "rescore": @@ -280,19 +280,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := types.NewRescore() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } s.Rescore = append(s.Rescore, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Rescore); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -300,12 +300,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]types.ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "search_after": if err := dec.Decode(&s.SearchAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } case "seq_no_primary_term": @@ -315,7 +315,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoPrimaryTerm", err) } s.SeqNoPrimaryTerm = &value case bool: @@ -330,7 +330,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -340,7 +340,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "slice": if err := dec.Decode(&s.Slice); err != nil { - return err + return fmt.Errorf("%s | %w", "Slice", err) } case "sort": @@ -349,24 +349,24 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "stored_fields": @@ -375,19 +375,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } case "suggest": if err := 
dec.Decode(&s.Suggest); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } case "terminate_after": @@ -397,7 +397,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminateAfter", err) } s.TerminateAfter = &value case float64: @@ -408,7 +408,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -424,7 +424,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrackScores", err) } s.TrackScores = &value case bool: @@ -433,7 +433,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "track_total_hits": if err := dec.Decode(&s.TrackTotalHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackTotalHits", err) } case "version": @@ -443,7 +443,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } s.Version = &value case bool: diff --git a/typedapi/fleet/search/response.go b/typedapi/fleet/search/response.go index d28b4c6da5..1db2ee1fc5 100644 --- a/typedapi/fleet/search/response.go +++ b/typedapi/fleet/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -33,7 +34,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/fleet/search/SearchResponse.ts#L33-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/fleet/search/SearchResponse.ts#L33-L50 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -100,490 +101,490 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cardinality": o := types.NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := types.NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := types.NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := types.NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := types.NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := types.NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := types.NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := types.NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := types.NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := types.NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := types.NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := types.NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := types.NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := types.NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := types.NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"date_histogram": o := types.NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := types.NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := types.NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := types.NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := types.NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := types.NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := types.NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := types.NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := types.NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := types.NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := types.NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := types.NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := types.NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := types.NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := types.NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := types.NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := types.NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := types.NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := types.NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := types.NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := types.NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := types.NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := types.NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := types.NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := types.NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := types.NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := types.NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := types.NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := types.NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := types.NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := types.NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := types.NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
case "inference": o := types.NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := types.NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := types.NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := types.NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := types.NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := types.NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := types.NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -593,7 +594,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -602,7 +603,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -610,12 +611,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -625,7 +626,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := types.Float64(value) s.MaxScore = &f @@ -641,7 +642,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -651,22 +652,22 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case 
"_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -694,28 +695,28 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "completion": o := types.NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := types.NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := types.NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -725,7 +726,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -739,7 +740,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -753,7 +754,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -767,7 +768,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/fleet/search/search.go b/typedapi/fleet/search/search.go index dcd793f938..fcc68414c5 100644 --- a/typedapi/fleet/search/search.go +++ b/typedapi/fleet/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Search API where the search will only be executed after specified checkpoints // are available due to a refresh. This API is designed for internal use by the diff --git a/typedapi/graph/explore/explore.go b/typedapi/graph/explore/explore.go index 2cbd01cc20..cad8e29788 100644 --- a/typedapi/graph/explore/explore.go +++ b/typedapi/graph/explore/explore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Explore extracted and summarized information about the documents and terms in // an index. 
diff --git a/typedapi/graph/explore/request.go b/typedapi/graph/explore/request.go index ad8650574b..79a9e639ec 100644 --- a/typedapi/graph/explore/request.go +++ b/typedapi/graph/explore/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explore @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explore // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/explore/GraphExploreRequest.ts#L28-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/explore/GraphExploreRequest.ts#L28-L72 type Request struct { // Connections Specifies or more fields from which you want to extract terms that are diff --git a/typedapi/graph/explore/response.go b/typedapi/graph/explore/response.go index 8bc77d81e9..bffdf22caa 100644 --- a/typedapi/graph/explore/response.go +++ b/typedapi/graph/explore/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explore @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explore // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/explore/GraphExploreResponse.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/explore/GraphExploreResponse.ts#L25-L33 type Response struct { Connections []types.Connection `json:"connections"` Failures []types.ShardFailure `json:"failures"` diff --git a/typedapi/ilm/deletelifecycle/delete_lifecycle.go b/typedapi/ilm/deletelifecycle/delete_lifecycle.go index b0023e0187..2bdb41fc21 100644 --- a/typedapi/ilm/deletelifecycle/delete_lifecycle.go +++ b/typedapi/ilm/deletelifecycle/delete_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes the specified lifecycle policy definition. A currently used policy // cannot be deleted. diff --git a/typedapi/ilm/deletelifecycle/response.go b/typedapi/ilm/deletelifecycle/response.go index a1b2c9dd47..9a28f73eb3 100644 --- a/typedapi/ilm/deletelifecycle/response.go +++ b/typedapi/ilm/deletelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletelifecycle // Response holds the response body struct for the package deletelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/delete_lifecycle/DeleteLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/delete_lifecycle/DeleteLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/explainlifecycle/explain_lifecycle.go b/typedapi/ilm/explainlifecycle/explain_lifecycle.go index 6bbf0dbc27..6530bbd851 100644 --- a/typedapi/ilm/explainlifecycle/explain_lifecycle.go +++ b/typedapi/ilm/explainlifecycle/explain_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the index's current lifecycle state, such as the // currently executing phase, action, and step. diff --git a/typedapi/ilm/explainlifecycle/response.go b/typedapi/ilm/explainlifecycle/response.go index c87cdc7679..225a11665e 100644 --- a/typedapi/ilm/explainlifecycle/response.go +++ b/typedapi/ilm/explainlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explainlifecycle @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package explainlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/explain_lifecycle/ExplainLifecycleResponse.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/explain_lifecycle/ExplainLifecycleResponse.ts#L24-L28 type Response struct { Indices map[string]types.LifecycleExplain `json:"indices"` } diff --git a/typedapi/ilm/getlifecycle/get_lifecycle.go b/typedapi/ilm/getlifecycle/get_lifecycle.go index 0d68bcc65e..fcab1aaee0 100644 --- a/typedapi/ilm/getlifecycle/get_lifecycle.go +++ b/typedapi/ilm/getlifecycle/get_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the specified policy definition. Includes the policy version and last // modified date. diff --git a/typedapi/ilm/getlifecycle/response.go b/typedapi/ilm/getlifecycle/response.go index 103372bda5..b4e8852553 100644 --- a/typedapi/ilm/getlifecycle/response.go +++ b/typedapi/ilm/getlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getlifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/get_lifecycle/GetLifecycleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/get_lifecycle/GetLifecycleResponse.ts#L23-L25 type Response map[string]types.Lifecycle diff --git a/typedapi/ilm/getstatus/get_status.go b/typedapi/ilm/getstatus/get_status.go index 55875deffa..c2d8b8c16f 100644 --- a/typedapi/ilm/getstatus/get_status.go +++ b/typedapi/ilm/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the current index lifecycle management (ILM) status. package getstatus diff --git a/typedapi/ilm/getstatus/response.go b/typedapi/ilm/getstatus/response.go index fa037dd484..5f12707c8a 100644 --- a/typedapi/ilm/getstatus/response.go +++ b/typedapi/ilm/getstatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getstatus @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/get_status/GetIlmStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/get_status/GetIlmStatusResponse.ts#L22-L24 type Response struct { OperationMode lifecycleoperationmode.LifecycleOperationMode `json:"operation_mode"` } diff --git a/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go b/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go index 27fb993614..81d81d727b 100644 --- a/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go +++ b/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Migrates the indices and ILM policies away from custom node attribute // allocation routing to data tiers routing diff --git a/typedapi/ilm/migratetodatatiers/request.go b/typedapi/ilm/migratetodatatiers/request.go index 132e6a7dec..0cf70e99a8 100644 --- a/typedapi/ilm/migratetodatatiers/request.go +++ b/typedapi/ilm/migratetodatatiers/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package migratetodatatiers @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package migratetodatatiers // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/migrate_to_data_tiers/Request.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/migrate_to_data_tiers/Request.ts#L22-L43 type Request struct { LegacyTemplateToDelete *string `json:"legacy_template_to_delete,omitempty"` NodeAttribute *string `json:"node_attribute,omitempty"` diff --git a/typedapi/ilm/migratetodatatiers/response.go b/typedapi/ilm/migratetodatatiers/response.go index 90c6318923..2cc2106b93 100644 --- a/typedapi/ilm/migratetodatatiers/response.go +++ b/typedapi/ilm/migratetodatatiers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package migratetodatatiers @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Response holds the response body struct for the package migratetodatatiers // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/migrate_to_data_tiers/Response.ts#L22-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/migrate_to_data_tiers/Response.ts#L22-L32 type Response struct { DryRun bool `json:"dry_run"` MigratedComponentTemplates []string `json:"migrated_component_templates"` @@ -68,7 +69,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DryRun", err) } s.DryRun = value case bool: @@ -77,17 +78,17 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "migrated_component_templates": if err := dec.Decode(&s.MigratedComponentTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "MigratedComponentTemplates", err) } case "migrated_composable_templates": if err := dec.Decode(&s.MigratedComposableTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "MigratedComposableTemplates", err) } case "migrated_ilm_policies": if err := dec.Decode(&s.MigratedIlmPolicies); err != nil { - return err + return fmt.Errorf("%s | %w", "MigratedIlmPolicies", err) } case "migrated_indices": @@ -96,25 +97,25 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MigratedIndices", err) } s.MigratedIndices = append(s.MigratedIndices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MigratedIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "MigratedIndices", err) } } case "migrated_legacy_templates": if err := dec.Decode(&s.MigratedLegacyTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", 
"MigratedLegacyTemplates", err) } case "removed_legacy_template": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RemovedLegacyTemplate", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/ilm/movetostep/move_to_step.go b/typedapi/ilm/movetostep/move_to_step.go index 85c2b2a3a8..f4b9c688ec 100644 --- a/typedapi/ilm/movetostep/move_to_step.go +++ b/typedapi/ilm/movetostep/move_to_step.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Manually moves an index into the specified step and executes that step. package movetostep diff --git a/typedapi/ilm/movetostep/request.go b/typedapi/ilm/movetostep/request.go index 89a1c27830..2dce1e58de 100644 --- a/typedapi/ilm/movetostep/request.go +++ b/typedapi/ilm/movetostep/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package movetostep @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package movetostep // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/move_to_step/MoveToStepRequest.ts#L24-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/move_to_step/MoveToStepRequest.ts#L24-L36 type Request struct { CurrentStep *types.StepKey `json:"current_step,omitempty"` NextStep *types.StepKey `json:"next_step,omitempty"` diff --git a/typedapi/ilm/movetostep/response.go b/typedapi/ilm/movetostep/response.go index 30005c6f97..4b6d92f7cf 100644 --- a/typedapi/ilm/movetostep/response.go +++ b/typedapi/ilm/movetostep/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package movetostep // Response holds the response body struct for the package movetostep // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/move_to_step/MoveToStepResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/move_to_step/MoveToStepResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/putlifecycle/put_lifecycle.go b/typedapi/ilm/putlifecycle/put_lifecycle.go index 54a21095b6..6ba4f52983 100644 --- a/typedapi/ilm/putlifecycle/put_lifecycle.go +++ b/typedapi/ilm/putlifecycle/put_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a lifecycle policy package putlifecycle diff --git a/typedapi/ilm/putlifecycle/request.go b/typedapi/ilm/putlifecycle/request.go index 1f674b6ba4..65a4346e97 100644 --- a/typedapi/ilm/putlifecycle/request.go +++ b/typedapi/ilm/putlifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putlifecycle @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/put_lifecycle/PutLifecycleRequest.ts#L25-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/put_lifecycle/PutLifecycleRequest.ts#L25-L55 type Request struct { Policy *types.IlmPolicy `json:"policy,omitempty"` } diff --git a/typedapi/ilm/putlifecycle/response.go b/typedapi/ilm/putlifecycle/response.go index e5f0c01e8f..5ee8a93345 100644 --- a/typedapi/ilm/putlifecycle/response.go +++ b/typedapi/ilm/putlifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putlifecycle // Response holds the response body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/put_lifecycle/PutLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/put_lifecycle/PutLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/removepolicy/remove_policy.go b/typedapi/ilm/removepolicy/remove_policy.go index f01b198ba9..ad5b26cfd7 100644 --- a/typedapi/ilm/removepolicy/remove_policy.go +++ b/typedapi/ilm/removepolicy/remove_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes the assigned lifecycle policy and stops managing the specified index package removepolicy diff --git a/typedapi/ilm/removepolicy/response.go b/typedapi/ilm/removepolicy/response.go index dc3d0cc605..f98fb408a2 100644 --- a/typedapi/ilm/removepolicy/response.go +++ b/typedapi/ilm/removepolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package removepolicy // Response holds the response body struct for the package removepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/remove_policy/RemovePolicyResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/remove_policy/RemovePolicyResponse.ts#L22-L27 type Response struct { FailedIndexes []string `json:"failed_indexes"` HasFailures bool `json:"has_failures"` diff --git a/typedapi/ilm/retry/response.go b/typedapi/ilm/retry/response.go index 5ea7c7db3c..97e4f6e1c0 100644 --- a/typedapi/ilm/retry/response.go +++ b/typedapi/ilm/retry/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package retry // Response holds the response body struct for the package retry // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/retry/RetryIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/retry/RetryIlmResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/retry/retry.go b/typedapi/ilm/retry/retry.go index 553b91cafb..99d5ac514e 100644 --- a/typedapi/ilm/retry/retry.go +++ b/typedapi/ilm/retry/retry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retries executing the policy for an index that is in the ERROR step. package retry diff --git a/typedapi/ilm/start/response.go b/typedapi/ilm/start/response.go index 2ed39d7427..ff39badc72 100644 --- a/typedapi/ilm/start/response.go +++ b/typedapi/ilm/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/start/StartIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/start/StartIlmResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ilm/start/start.go b/typedapi/ilm/start/start.go index dbae0ec595..f1bdf75c91 100644 --- a/typedapi/ilm/start/start.go +++ b/typedapi/ilm/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Start the index lifecycle management (ILM) plugin. package start diff --git a/typedapi/ilm/stop/response.go b/typedapi/ilm/stop/response.go index 42f0753003..25389226dd 100644 --- a/typedapi/ilm/stop/response.go +++ b/typedapi/ilm/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/stop/StopIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/stop/StopIlmResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/stop/stop.go b/typedapi/ilm/stop/stop.go index 93c5a37441..fb804d6cbb 100644 --- a/typedapi/ilm/stop/stop.go +++ b/typedapi/ilm/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Halts all lifecycle management operations and stops the index lifecycle // management (ILM) plugin diff --git a/typedapi/indices/addblock/add_block.go b/typedapi/indices/addblock/add_block.go index a7bb44ecbc..d180296fb3 100644 --- a/typedapi/indices/addblock/add_block.go +++ b/typedapi/indices/addblock/add_block.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds a block to an index. package addblock diff --git a/typedapi/indices/addblock/response.go b/typedapi/indices/addblock/response.go index 05e1e98e81..706d6ae7a5 100644 --- a/typedapi/indices/addblock/response.go +++ b/typedapi/indices/addblock/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package addblock @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package addblock // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/add_block/IndicesAddBlockResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/add_block/IndicesAddBlockResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Indices []types.IndicesBlockStatus `json:"indices"` diff --git a/typedapi/indices/analyze/analyze.go b/typedapi/indices/analyze/analyze.go index e95c2539b7..0f6703b8cf 100644 --- a/typedapi/indices/analyze/analyze.go +++ b/typedapi/indices/analyze/analyze.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Performs the analysis process on a text and return the tokens breakdown of // the text. diff --git a/typedapi/indices/analyze/request.go b/typedapi/indices/analyze/request.go index 183f937c35..e0154c134e 100644 --- a/typedapi/indices/analyze/request.go +++ b/typedapi/indices/analyze/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package analyze @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package analyze // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/IndicesAnalyzeRequest.ts#L27-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/IndicesAnalyzeRequest.ts#L27-L92 type Request struct { // Analyzer The name of the analyzer that should be applied to the provided `text`. 
@@ -98,7 +98,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +109,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "attributes": if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "char_filter": @@ -170,7 +170,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) o := new(interface{}) if err := json.NewDecoder(source).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CharFilter", err) } s.CharFilter = append(s.CharFilter, *o) } @@ -183,7 +183,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -192,7 +192,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "filter": @@ -505,7 +505,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) o := new(interface{}) if err := json.NewDecoder(source).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } s.Filter = append(s.Filter, *o) } @@ -514,7 +514,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "normalizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Normalizer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -529,13 +529,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } s.Text = append(s.Text, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Text); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } } @@ -645,7 +645,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } default: if err := localDec.Decode(&s.Tokenizer); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } } diff --git a/typedapi/indices/analyze/response.go b/typedapi/indices/analyze/response.go index f130f95996..d6fbd05157 100644 --- a/typedapi/indices/analyze/response.go +++ b/typedapi/indices/analyze/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package analyze @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package analyze // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/IndicesAnalyzeResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/IndicesAnalyzeResponse.ts#L22-L27 type Response struct { Detail *types.AnalyzeDetail `json:"detail,omitempty"` Tokens []types.AnalyzeToken `json:"tokens,omitempty"` diff --git a/typedapi/indices/clearcache/clear_cache.go b/typedapi/indices/clearcache/clear_cache.go index 5817ee44b5..ae5bab2c2e 100644 --- a/typedapi/indices/clearcache/clear_cache.go +++ b/typedapi/indices/clearcache/clear_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clears all or specific caches for one or more indices. package clearcache diff --git a/typedapi/indices/clearcache/response.go b/typedapi/indices/clearcache/response.go index 4c7d7b4860..0dd3ced353 100644 --- a/typedapi/indices/clearcache/response.go +++ b/typedapi/indices/clearcache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcache // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/clear_cache/IndicesClearCacheResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/clear_cache/IndicesClearCacheResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/indices/clone/clone.go b/typedapi/indices/clone/clone.go index 2b3452ad3f..22d020faa4 100644 --- a/typedapi/indices/clone/clone.go +++ b/typedapi/indices/clone/clone.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clones an index package clone diff --git a/typedapi/indices/clone/request.go b/typedapi/indices/clone/request.go index da1daef2dd..8bbcd7aa8a 100644 --- a/typedapi/indices/clone/request.go +++ b/typedapi/indices/clone/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clone @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/clone/IndicesCloneRequest.ts#L27-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/clone/IndicesCloneRequest.ts#L27-L75 type Request struct { // Aliases Aliases for the resulting index. diff --git a/typedapi/indices/clone/response.go b/typedapi/indices/clone/response.go index 8e2ce29d9b..236bfde943 100644 --- a/typedapi/indices/clone/response.go +++ b/typedapi/indices/clone/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clone // Response holds the response body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/clone/IndicesCloneResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/clone/IndicesCloneResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/close/close.go b/typedapi/indices/close/close.go index e73c6a1159..e872f69b98 100644 --- a/typedapi/indices/close/close.go +++ b/typedapi/indices/close/close.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Closes an index. package close diff --git a/typedapi/indices/close/response.go b/typedapi/indices/close/response.go index 0250a16a8e..d59537433c 100644 --- a/typedapi/indices/close/response.go +++ b/typedapi/indices/close/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package close @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package close // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/close/CloseIndexResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/close/CloseIndexResponse.ts#L24-L30 type Response struct { Acknowledged bool `json:"acknowledged"` Indices map[string]types.CloseIndexResult `json:"indices"` diff --git a/typedapi/indices/create/create.go b/typedapi/indices/create/create.go index 0bfda2a5e9..5bb21b5ab2 100644 --- a/typedapi/indices/create/create.go +++ b/typedapi/indices/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates an index with optional settings and mappings. package create diff --git a/typedapi/indices/create/request.go b/typedapi/indices/create/request.go index f0cb1b6bcc..08df8999f2 100644 --- a/typedapi/indices/create/request.go +++ b/typedapi/indices/create/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package create @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/create/IndicesCreateRequest.ts#L28-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/create/IndicesCreateRequest.ts#L28-L81 type Request struct { // Aliases Aliases for the index. diff --git a/typedapi/indices/create/response.go b/typedapi/indices/create/response.go index 08f9f5e60a..7c4f40c48f 100644 --- a/typedapi/indices/create/response.go +++ b/typedapi/indices/create/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package create // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/create/IndicesCreateResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/create/IndicesCreateResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/createdatastream/create_data_stream.go b/typedapi/indices/createdatastream/create_data_stream.go index c6152ce4de..47317b0b81 100644 --- a/typedapi/indices/createdatastream/create_data_stream.go +++ b/typedapi/indices/createdatastream/create_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a data stream package createdatastream diff --git a/typedapi/indices/createdatastream/response.go b/typedapi/indices/createdatastream/response.go index 95c7d2eea2..b1be03427a 100644 --- a/typedapi/indices/createdatastream/response.go +++ b/typedapi/indices/createdatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package createdatastream // Response holds the response body struct for the package createdatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/create_data_stream/IndicesCreateDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/create_data_stream/IndicesCreateDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/datastreamsstats/data_streams_stats.go b/typedapi/indices/datastreamsstats/data_streams_stats.go index f358b8b7ab..0c93d1afab 100644 --- a/typedapi/indices/datastreamsstats/data_streams_stats.go +++ b/typedapi/indices/datastreamsstats/data_streams_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides statistics on operations happening in a data stream. package datastreamsstats diff --git a/typedapi/indices/datastreamsstats/response.go b/typedapi/indices/datastreamsstats/response.go index 9d16b5afce..13f834179a 100644 --- a/typedapi/indices/datastreamsstats/response.go +++ b/typedapi/indices/datastreamsstats/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package datastreamsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package datastreamsstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L25-L43 type Response struct { // BackingIndices Total number of backing indices for the selected data streams. @@ -38,7 +38,7 @@ type Response struct { // Shards_ Contains information about shards that attempted to execute the request. Shards_ types.ShardStatistics `json:"_shards"` // TotalStoreSizeBytes Total size, in bytes, of all shards for the selected data streams. - TotalStoreSizeBytes int `json:"total_store_size_bytes"` + TotalStoreSizeBytes int64 `json:"total_store_size_bytes"` // TotalStoreSizes Total size of all shards for the selected data streams. // This property is included only if the `human` query parameter is `true` TotalStoreSizes types.ByteSize `json:"total_store_sizes,omitempty"` diff --git a/typedapi/indices/delete/delete.go b/typedapi/indices/delete/delete.go index fc9f50ae07..6314b38e34 100644 --- a/typedapi/indices/delete/delete.go +++ b/typedapi/indices/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an index. package delete diff --git a/typedapi/indices/delete/response.go b/typedapi/indices/delete/response.go index 71727ea2ab..d84167f8cc 100644 --- a/typedapi/indices/delete/response.go +++ b/typedapi/indices/delete/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/delete/IndicesDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/delete/IndicesDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deletealias/delete_alias.go b/typedapi/indices/deletealias/delete_alias.go index 630e34ad9c..c88950f1fb 100644 --- a/typedapi/indices/deletealias/delete_alias.go +++ b/typedapi/indices/deletealias/delete_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
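The data_streams_stats hunk above widens TotalStoreSizeBytes from int to int64, so byte counts above roughly 2 GiB decode cleanly even on 32-bit builds. A minimal sketch of that behaviour, using a stand-in struct rather than the generated one, with a made-up payload:

// Sketch only: statsBody mirrors just the field touched by this change;
// the real struct lives in typedapi/indices/datastreamsstats.
package main

import (
	"encoding/json"
	"fmt"
)

type statsBody struct {
	TotalStoreSizeBytes int64 `json:"total_store_size_bytes"`
}

func main() {
	// ~5 GiB, a value that would overflow a 32-bit int during decoding.
	payload := []byte(`{"total_store_size_bytes": 5368709120}`)

	var s statsBody
	if err := json.Unmarshal(payload, &s); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Println("total store size:", s.TotalStoreSizeBytes, "bytes")
}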
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an alias. package deletealias diff --git a/typedapi/indices/deletealias/response.go b/typedapi/indices/deletealias/response.go index e4b05fff7e..7783c9b28c 100644 --- a/typedapi/indices/deletealias/response.go +++ b/typedapi/indices/deletealias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletealias // Response holds the response body struct for the package deletealias // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/delete_alias/IndicesDeleteAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/delete_alias/IndicesDeleteAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go b/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go index 5c9d7abdb6..b9de66e16c 100644 --- a/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go +++ b/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes the data stream lifecycle of the selected data streams. package deletedatalifecycle diff --git a/typedapi/indices/deletedatalifecycle/response.go b/typedapi/indices/deletedatalifecycle/response.go index c426e8b2c5..38184869f6 100644 --- a/typedapi/indices/deletedatalifecycle/response.go +++ b/typedapi/indices/deletedatalifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletedatalifecycle // Response holds the response body struct for the package deletedatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/delete_data_lifecycle/IndicesDeleteDataLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/delete_data_lifecycle/IndicesDeleteDataLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/indices/deletedatastream/delete_data_stream.go b/typedapi/indices/deletedatastream/delete_data_stream.go index 891e6eb59f..792bc5c956 100644 --- a/typedapi/indices/deletedatastream/delete_data_stream.go +++ b/typedapi/indices/deletedatastream/delete_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a data stream. package deletedatastream diff --git a/typedapi/indices/deletedatastream/response.go b/typedapi/indices/deletedatastream/response.go index 6d30a7a590..76a4d55b52 100644 --- a/typedapi/indices/deletedatastream/response.go +++ b/typedapi/indices/deletedatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletedatastream // Response holds the response body struct for the package deletedatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/delete_data_stream/IndicesDeleteDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/delete_data_stream/IndicesDeleteDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deleteindextemplate/delete_index_template.go b/typedapi/indices/deleteindextemplate/delete_index_template.go index fc5dd63f80..271ed53b3d 100644 --- a/typedapi/indices/deleteindextemplate/delete_index_template.go +++ b/typedapi/indices/deleteindextemplate/delete_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an index template. package deleteindextemplate diff --git a/typedapi/indices/deleteindextemplate/response.go b/typedapi/indices/deleteindextemplate/response.go index 8b74520079..e765d4ffba 100644 --- a/typedapi/indices/deleteindextemplate/response.go +++ b/typedapi/indices/deleteindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteindextemplate // Response holds the response body struct for the package deleteindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/delete_index_template/IndicesDeleteIndexTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/delete_index_template/IndicesDeleteIndexTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deletetemplate/delete_template.go b/typedapi/indices/deletetemplate/delete_template.go index 564ad14cc8..2a22f1cd16 100644 --- a/typedapi/indices/deletetemplate/delete_template.go +++ b/typedapi/indices/deletetemplate/delete_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an index template. package deletetemplate diff --git a/typedapi/indices/deletetemplate/response.go b/typedapi/indices/deletetemplate/response.go index fd20583d62..84e079bbe8 100644 --- a/typedapi/indices/deletetemplate/response.go +++ b/typedapi/indices/deletetemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletetemplate // Response holds the response body struct for the package deletetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/delete_template/IndicesDeleteTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/delete_template/IndicesDeleteTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/diskusage/disk_usage.go b/typedapi/indices/diskusage/disk_usage.go index 915ddf0a48..6a7ee9330b 100644 --- a/typedapi/indices/diskusage/disk_usage.go +++ b/typedapi/indices/diskusage/disk_usage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Analyzes the disk usage of each field of an index or data stream package diskusage diff --git a/typedapi/indices/diskusage/response.go b/typedapi/indices/diskusage/response.go index ac26a0cf4f..510018cde8 100644 --- a/typedapi/indices/diskusage/response.go +++ b/typedapi/indices/diskusage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package diskusage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package diskusage // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/disk_usage/IndicesDiskUsageResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/disk_usage/IndicesDiskUsageResponse.ts#L22-L24 type Response = json.RawMessage diff --git a/typedapi/indices/downsample/downsample.go b/typedapi/indices/downsample/downsample.go index b6a9363ad2..2ee1794e9a 100644 --- a/typedapi/indices/downsample/downsample.go +++ b/typedapi/indices/downsample/downsample.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Downsample an index package downsample diff --git a/typedapi/indices/downsample/request.go b/typedapi/indices/downsample/request.go index fb0f719a5f..83d2ee51be 100644 --- a/typedapi/indices/downsample/request.go +++ b/typedapi/indices/downsample/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package downsample @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package downsample // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/downsample/Request.ts#L24-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/downsample/Request.ts#L24-L44 type Request = types.DownsampleConfig diff --git a/typedapi/indices/downsample/response.go b/typedapi/indices/downsample/response.go index 1e2931c4fc..635e29baad 100644 --- a/typedapi/indices/downsample/response.go +++ b/typedapi/indices/downsample/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package downsample @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package downsample // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/downsample/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/downsample/Response.ts#L22-L24 type Response = json.RawMessage diff --git a/typedapi/indices/exists/exists.go b/typedapi/indices/exists/exists.go index 7d59a70d40..9a925bf4ca 100644 --- a/typedapi/indices/exists/exists.go +++ b/typedapi/indices/exists/exists.go @@ -16,7 +16,7 @@ // under the License. 
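Both the disk_usage and downsample responses above are aliased to json.RawMessage, since their shape is keyed by index name rather than fixed. A minimal sketch of decoding such a raw body; the payload and field names here are hypothetical, not taken from the API:

// Sketch only: decode a raw, index-keyed body into a generic map.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	raw := json.RawMessage(`{"my-index": {"store_size_in_bytes": 12345}}`)

	var byIndex map[string]map[string]any
	if err := json.Unmarshal(raw, &byIndex); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Println(byIndex["my-index"]["store_size_in_bytes"])
}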
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a particular index exists. package exists diff --git a/typedapi/indices/existsalias/exists_alias.go b/typedapi/indices/existsalias/exists_alias.go index bb031cbe0a..3c7c2e2624 100644 --- a/typedapi/indices/existsalias/exists_alias.go +++ b/typedapi/indices/existsalias/exists_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a particular alias exists. package existsalias diff --git a/typedapi/indices/existsindextemplate/exists_index_template.go b/typedapi/indices/existsindextemplate/exists_index_template.go index 135d0ba9ee..2fb781a1e4 100644 --- a/typedapi/indices/existsindextemplate/exists_index_template.go +++ b/typedapi/indices/existsindextemplate/exists_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a particular index template exists. package existsindextemplate diff --git a/typedapi/indices/existstemplate/exists_template.go b/typedapi/indices/existstemplate/exists_template.go index b88e810e3e..4d574339ad 100644 --- a/typedapi/indices/existstemplate/exists_template.go +++ b/typedapi/indices/existstemplate/exists_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about whether a particular index template exists. package existstemplate diff --git a/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go b/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go index 94a077685b..ecea93d372 100644 --- a/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go +++ b/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the index's current data stream lifecycle, such // as any potential encountered error, time since creation etc. diff --git a/typedapi/indices/explaindatalifecycle/response.go b/typedapi/indices/explaindatalifecycle/response.go index d8180b524d..573a9d4b89 100644 --- a/typedapi/indices/explaindatalifecycle/response.go +++ b/typedapi/indices/explaindatalifecycle/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explaindatalifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explaindatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L25-L29 type Response struct { Indices map[string]types.DataStreamLifecycleExplain `json:"indices"` } diff --git a/typedapi/indices/fieldusagestats/field_usage_stats.go b/typedapi/indices/fieldusagestats/field_usage_stats.go index ed8d237cf5..33cb360861 100644 --- a/typedapi/indices/fieldusagestats/field_usage_stats.go +++ b/typedapi/indices/fieldusagestats/field_usage_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the field usage stats for each field of an index package fieldusagestats diff --git a/typedapi/indices/fieldusagestats/response.go b/typedapi/indices/fieldusagestats/response.go index 3c4d4e9d51..5627b9c3eb 100644 --- a/typedapi/indices/fieldusagestats/response.go +++ b/typedapi/indices/fieldusagestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package fieldusagestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package fieldusagestats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L28-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L28-L30 type Response struct { FieldsUsageBody map[string]types.UsageStatsIndex `json:"-"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/flush/flush.go b/typedapi/indices/flush/flush.go index cef2c26bae..55308a13d5 100644 --- a/typedapi/indices/flush/flush.go +++ b/typedapi/indices/flush/flush.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Performs the flush operation on one or more indices. 
package flush diff --git a/typedapi/indices/flush/response.go b/typedapi/indices/flush/response.go index dbad8ac6ae..30b66b66b9 100644 --- a/typedapi/indices/flush/response.go +++ b/typedapi/indices/flush/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package flush @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package flush // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/flush/IndicesFlushResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/flush/IndicesFlushResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/indices/forcemerge/forcemerge.go b/typedapi/indices/forcemerge/forcemerge.go index a20e37a820..7cb3d6f668 100644 --- a/typedapi/indices/forcemerge/forcemerge.go +++ b/typedapi/indices/forcemerge/forcemerge.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Performs the force merge operation on one or more indices. package forcemerge diff --git a/typedapi/indices/forcemerge/response.go b/typedapi/indices/forcemerge/response.go index f96f17d3d2..41777ec3b1 100644 --- a/typedapi/indices/forcemerge/response.go +++ b/typedapi/indices/forcemerge/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package forcemerge @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package forcemerge // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/forcemerge/IndicesForceMergeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/forcemerge/IndicesForceMergeResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` // Task task contains a task id returned when wait_for_completion=false, diff --git a/typedapi/indices/get/get.go b/typedapi/indices/get/get.go index 1d09401e66..40707c26ba 100644 --- a/typedapi/indices/get/get.go +++ b/typedapi/indices/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about one or more indices. 
package get diff --git a/typedapi/indices/get/response.go b/typedapi/indices/get/response.go index 0ab165d71d..e742772e1e 100644 --- a/typedapi/indices/get/response.go +++ b/typedapi/indices/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get/IndicesGetResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get/IndicesGetResponse.ts#L24-L26 type Response map[string]types.IndexState diff --git a/typedapi/indices/getalias/get_alias.go b/typedapi/indices/getalias/get_alias.go index 00dc75e598..9e00858020 100644 --- a/typedapi/indices/getalias/get_alias.go +++ b/typedapi/indices/getalias/get_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns an alias. package getalias diff --git a/typedapi/indices/getalias/response.go b/typedapi/indices/getalias/response.go index 159deda53a..cd00625271 100644 --- a/typedapi/indices/getalias/response.go +++ b/typedapi/indices/getalias/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getalias @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getalias // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_alias/IndicesGetAliasResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_alias/IndicesGetAliasResponse.ts#L26-L34 type Response map[string]types.IndexAliases diff --git a/typedapi/indices/getdatalifecycle/get_data_lifecycle.go b/typedapi/indices/getdatalifecycle/get_data_lifecycle.go index 5f4569bf59..537ae50c3f 100644 --- a/typedapi/indices/getdatalifecycle/get_data_lifecycle.go +++ b/typedapi/indices/getdatalifecycle/get_data_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the data stream lifecycle of the selected data streams. 
package getdatalifecycle diff --git a/typedapi/indices/getdatalifecycle/response.go b/typedapi/indices/getdatalifecycle/response.go index 30b2c9c694..fbdeb6dbfc 100644 --- a/typedapi/indices/getdatalifecycle/response.go +++ b/typedapi/indices/getdatalifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getdatalifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L23-L25 type Response struct { DataStreams []types.DataStreamWithLifecycle `json:"data_streams"` } diff --git a/typedapi/indices/getdatastream/get_data_stream.go b/typedapi/indices/getdatastream/get_data_stream.go index 03ef7b0aef..ede151031d 100644 --- a/typedapi/indices/getdatastream/get_data_stream.go +++ b/typedapi/indices/getdatastream/get_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns data streams. package getdatastream diff --git a/typedapi/indices/getdatastream/response.go b/typedapi/indices/getdatastream/response.go index a0e299aebd..f012cae5b3 100644 --- a/typedapi/indices/getdatastream/response.go +++ b/typedapi/indices/getdatastream/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getdatastream @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_data_stream/IndicesGetDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_data_stream/IndicesGetDataStreamResponse.ts#L22-L24 type Response struct { DataStreams []types.DataStream `json:"data_streams"` } diff --git a/typedapi/indices/getfieldmapping/get_field_mapping.go b/typedapi/indices/getfieldmapping/get_field_mapping.go index 95fa0cc00a..aa313677b9 100644 --- a/typedapi/indices/getfieldmapping/get_field_mapping.go +++ b/typedapi/indices/getfieldmapping/get_field_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns mapping for one or more fields. package getfieldmapping diff --git a/typedapi/indices/getfieldmapping/response.go b/typedapi/indices/getfieldmapping/response.go index 284527acce..dca9f98070 100644 --- a/typedapi/indices/getfieldmapping/response.go +++ b/typedapi/indices/getfieldmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getfieldmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfieldmapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_field_mapping/IndicesGetFieldMappingResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_field_mapping/IndicesGetFieldMappingResponse.ts#L24-L26 type Response map[string]types.TypeFieldMappings diff --git a/typedapi/indices/getindextemplate/get_index_template.go b/typedapi/indices/getindextemplate/get_index_template.go index d839f4c34e..2e9d56404a 100644 --- a/typedapi/indices/getindextemplate/get_index_template.go +++ b/typedapi/indices/getindextemplate/get_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns an index template. package getindextemplate diff --git a/typedapi/indices/getindextemplate/response.go b/typedapi/indices/getindextemplate/response.go index 59c1396a04..7232f2e6f2 100644 --- a/typedapi/indices/getindextemplate/response.go +++ b/typedapi/indices/getindextemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getindextemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L23-L27 type Response struct { IndexTemplates []types.IndexTemplateItem `json:"index_templates"` } diff --git a/typedapi/indices/getmapping/get_mapping.go b/typedapi/indices/getmapping/get_mapping.go index 60c77be87b..b38cffbe5d 100644 --- a/typedapi/indices/getmapping/get_mapping.go +++ b/typedapi/indices/getmapping/get_mapping.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns mappings for one or more indices. package getmapping diff --git a/typedapi/indices/getmapping/response.go b/typedapi/indices/getmapping/response.go index 0076f37130..3021f16a53 100644 --- a/typedapi/indices/getmapping/response.go +++ b/typedapi/indices/getmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L24-L26 type Response map[string]types.IndexMappingRecord diff --git a/typedapi/indices/getsettings/get_settings.go b/typedapi/indices/getsettings/get_settings.go index f473ed6b0f..b0b6923eb3 100644 --- a/typedapi/indices/getsettings/get_settings.go +++ b/typedapi/indices/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns settings for one or more indices. package getsettings diff --git a/typedapi/indices/getsettings/response.go b/typedapi/indices/getsettings/response.go index f8b57fdf66..2dd05a55d6 100644 --- a/typedapi/indices/getsettings/response.go +++ b/typedapi/indices/getsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_settings/IndicesGetSettingsResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_settings/IndicesGetSettingsResponse.ts#L24-L26 type Response map[string]types.IndexState diff --git a/typedapi/indices/gettemplate/get_template.go b/typedapi/indices/gettemplate/get_template.go index 8e2cb46265..aec8fd3a5b 100644 --- a/typedapi/indices/gettemplate/get_template.go +++ b/typedapi/indices/gettemplate/get_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns an index template. package gettemplate diff --git a/typedapi/indices/gettemplate/response.go b/typedapi/indices/gettemplate/response.go index 1f40d6dbe0..d43614bfcc 100644 --- a/typedapi/indices/gettemplate/response.go +++ b/typedapi/indices/gettemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_template/IndicesGetTemplateResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_template/IndicesGetTemplateResponse.ts#L23-L25 type Response map[string]types.TemplateMapping diff --git a/typedapi/indices/migratetodatastream/migrate_to_data_stream.go b/typedapi/indices/migratetodatastream/migrate_to_data_stream.go index 3e19225117..62279ffd31 100644 --- a/typedapi/indices/migratetodatastream/migrate_to_data_stream.go +++ b/typedapi/indices/migratetodatastream/migrate_to_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Migrates an alias to a data stream package migratetodatastream diff --git a/typedapi/indices/migratetodatastream/response.go b/typedapi/indices/migratetodatastream/response.go index 00f514c99e..37ee4021d3 100644 --- a/typedapi/indices/migratetodatastream/response.go +++ b/typedapi/indices/migratetodatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package migratetodatastream // Response holds the response body struct for the package migratetodatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/migrate_to_data_stream/IndicesMigrateToDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/migrate_to_data_stream/IndicesMigrateToDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/modifydatastream/modify_data_stream.go b/typedapi/indices/modifydatastream/modify_data_stream.go index 7b8a79d767..9a2f3666df 100644 --- a/typedapi/indices/modifydatastream/modify_data_stream.go +++ b/typedapi/indices/modifydatastream/modify_data_stream.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Modifies a data stream package modifydatastream diff --git a/typedapi/indices/modifydatastream/request.go b/typedapi/indices/modifydatastream/request.go index 436cd7fbd5..3b31844705 100644 --- a/typedapi/indices/modifydatastream/request.go +++ b/typedapi/indices/modifydatastream/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package modifydatastream @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package modifydatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/modify_data_stream/IndicesModifyDataStreamRequest.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/modify_data_stream/IndicesModifyDataStreamRequest.ts#L23-L36 type Request struct { // Actions Actions to perform. diff --git a/typedapi/indices/modifydatastream/response.go b/typedapi/indices/modifydatastream/response.go index 30b6f1f3e5..32e03b3f3c 100644 --- a/typedapi/indices/modifydatastream/response.go +++ b/typedapi/indices/modifydatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package modifydatastream // Response holds the response body struct for the package modifydatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/modify_data_stream/IndicesModifyDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/modify_data_stream/IndicesModifyDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/open/open.go b/typedapi/indices/open/open.go index c0904ccf8e..709cf624f8 100644 --- a/typedapi/indices/open/open.go +++ b/typedapi/indices/open/open.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Opens an index. package open diff --git a/typedapi/indices/open/response.go b/typedapi/indices/open/response.go index 21dceb5c32..fffa08c216 100644 --- a/typedapi/indices/open/response.go +++ b/typedapi/indices/open/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package open // Response holds the response body struct for the package open // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/open/IndicesOpenResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/open/IndicesOpenResponse.ts#L20-L25 type Response struct { Acknowledged bool `json:"acknowledged"` ShardsAcknowledged bool `json:"shards_acknowledged"` diff --git a/typedapi/indices/promotedatastream/promote_data_stream.go b/typedapi/indices/promotedatastream/promote_data_stream.go index 1bcb8b5c2f..8af7b1c376 100644 --- a/typedapi/indices/promotedatastream/promote_data_stream.go +++ b/typedapi/indices/promotedatastream/promote_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Promotes a data stream from a replicated data stream managed by CCR to a // regular data stream diff --git a/typedapi/indices/promotedatastream/response.go b/typedapi/indices/promotedatastream/response.go index cf43b76e3f..10b534a128 100644 --- a/typedapi/indices/promotedatastream/response.go +++ b/typedapi/indices/promotedatastream/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package promotedatastream @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package promotedatastream // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/promote_data_stream/IndicesPromoteDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/promote_data_stream/IndicesPromoteDataStreamResponse.ts#L22-L24 type Response = json.RawMessage diff --git a/typedapi/indices/putalias/put_alias.go b/typedapi/indices/putalias/put_alias.go index f7e85f0d3e..30dfca7854 100644 --- a/typedapi/indices/putalias/put_alias.go +++ b/typedapi/indices/putalias/put_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates an alias. package putalias diff --git a/typedapi/indices/putalias/request.go b/typedapi/indices/putalias/request.go index c8d84bf25d..ab222e31fc 100644 --- a/typedapi/indices/putalias/request.go +++ b/typedapi/indices/putalias/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putalias @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putalias // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_alias/IndicesPutAliasRequest.ts#L25-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_alias/IndicesPutAliasRequest.ts#L25-L91 type Request struct { // Filter Query used to limit documents the alias can access. @@ -93,12 +93,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "index_routing": if err := dec.Decode(&s.IndexRouting); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexRouting", err) } case "is_write_index": @@ -108,7 +108,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsWriteIndex", err) } s.IsWriteIndex = &value case bool: @@ -117,12 +117,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "search_routing": if err := dec.Decode(&s.SearchRouting); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchRouting", err) } } diff --git a/typedapi/indices/putalias/response.go b/typedapi/indices/putalias/response.go index aa7e5068a4..46186e16b0 100644 --- a/typedapi/indices/putalias/response.go +++ b/typedapi/indices/putalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putalias // Response holds the response body struct for the package putalias // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_alias/IndicesPutAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_alias/IndicesPutAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putdatalifecycle/put_data_lifecycle.go b/typedapi/indices/putdatalifecycle/put_data_lifecycle.go index b04e30f005..86af456b90 100644 --- a/typedapi/indices/putdatalifecycle/put_data_lifecycle.go +++ b/typedapi/indices/putdatalifecycle/put_data_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the data stream lifecycle of the selected data streams. 
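The putalias request hunk above replaces bare "return err" with fmt.Errorf("%s | %w", "<Field>", err), so a decode failure names the offending field while the underlying error remains reachable through errors.Is and errors.As. A minimal, self-contained sketch of the same pattern; the field name and input are made up:

// Sketch only: wrap a decode error with the field name, keeping the cause.
package main

import (
	"errors"
	"fmt"
	"strconv"
)

func decodeBool(field, raw string) (bool, error) {
	v, err := strconv.ParseBool(raw)
	if err != nil {
		// Same shape as the generated code: field name, pipe, wrapped cause.
		return false, fmt.Errorf("%s | %w", field, err)
	}
	return v, nil
}

func main() {
	_, err := decodeBool("IsWriteIndex", "not-a-bool")
	fmt.Println(err) // IsWriteIndex | strconv.ParseBool: parsing "not-a-bool": invalid syntax

	var numErr *strconv.NumError
	if errors.As(err, &numErr) {
		fmt.Println("underlying cause still reachable:", numErr.Err)
	}
}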
package putdatalifecycle diff --git a/typedapi/indices/putdatalifecycle/request.go b/typedapi/indices/putdatalifecycle/request.go index 97868d66e5..9de1b9c681 100644 --- a/typedapi/indices/putdatalifecycle/request.go +++ b/typedapi/indices/putdatalifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putdatalifecycle @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package putdatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleRequest.ts#L25-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleRequest.ts#L25-L75 type Request struct { // DataRetention If defined, every document added to this data stream will be stored at least @@ -80,12 +80,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "data_retention": if err := dec.Decode(&s.DataRetention); err != nil { - return err + return fmt.Errorf("%s | %w", "DataRetention", err) } case "downsampling": if err := dec.Decode(&s.Downsampling); err != nil { - return err + return fmt.Errorf("%s | %w", "Downsampling", err) } } diff --git a/typedapi/indices/putdatalifecycle/response.go b/typedapi/indices/putdatalifecycle/response.go index 6dd310615e..fff61ece34 100644 --- a/typedapi/indices/putdatalifecycle/response.go +++ b/typedapi/indices/putdatalifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putdatalifecycle // Response holds the response body struct for the package putdatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putindextemplate/put_index_template.go b/typedapi/indices/putindextemplate/put_index_template.go index 670e3aa820..e0d00227d6 100644 --- a/typedapi/indices/putindextemplate/put_index_template.go +++ b/typedapi/indices/putindextemplate/put_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates an index template. 
package putindextemplate diff --git a/typedapi/indices/putindextemplate/request.go b/typedapi/indices/putindextemplate/request.go index 361c7704ac..9beeed5046 100644 --- a/typedapi/indices/putindextemplate/request.go +++ b/typedapi/indices/putindextemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putindextemplate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L36-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L36-L95 type Request struct { // ComposedOf An ordered list of component template names. @@ -101,12 +101,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "composed_of": if err := dec.Decode(&s.ComposedOf); err != nil { - return err + return fmt.Errorf("%s | %w", "ComposedOf", err) } case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "index_patterns": @@ -115,19 +115,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } s.IndexPatterns = append(s.IndexPatterns, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "priority": @@ -138,7 +138,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } s.Priority = &value case float64: @@ -148,12 +148,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/indices/putindextemplate/response.go b/typedapi/indices/putindextemplate/response.go index 43d0a8b566..a7397aee2f 100644 --- a/typedapi/indices/putindextemplate/response.go +++ b/typedapi/indices/putindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
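
Across these UnmarshalJSON hunks, decode failures are now wrapped as fmt.Errorf("%s | %w", "<Field>", err), so the failing field is named while the underlying error stays reachable through the chain. A minimal sketch from the caller side, using the putindextemplate.Request touched above (the malformed payload is illustrative):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/indices/putindextemplate"
)

func main() {
	var req putindextemplate.Request

	// "priority" is deliberately a non-numeric string so strconv.Atoi fails
	// inside the generated UnmarshalJSON.
	err := json.Unmarshal([]byte(`{"priority": "not-a-number"}`), &req)

	// The error now names the field, e.g.
	//   Priority | strconv.Atoi: parsing "not-a-number": invalid syntax
	fmt.Println(err)

	// %w keeps the original error reachable via the standard error chain.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true
}
```
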
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putindextemplate // Response holds the response body struct for the package putindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_index_template/IndicesPutIndexTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_index_template/IndicesPutIndexTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putmapping/put_mapping.go b/typedapi/indices/putmapping/put_mapping.go index 6d118845c2..4eb69f4f6e 100644 --- a/typedapi/indices/putmapping/put_mapping.go +++ b/typedapi/indices/putmapping/put_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the index mappings. package putmapping diff --git a/typedapi/indices/putmapping/request.go b/typedapi/indices/putmapping/request.go index b595ecf117..2d18a42e2a 100644 --- a/typedapi/indices/putmapping/request.go +++ b/typedapi/indices/putmapping/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putmapping @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package putmapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_mapping/IndicesPutMappingRequest.ts#L42-L149 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_mapping/IndicesPutMappingRequest.ts#L42-L149 type Request struct { // DateDetection Controls whether dynamic date detection is enabled. 
@@ -110,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DateDetection", err) } s.DateDetection = &value case bool: @@ -119,12 +119,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "dynamic_date_formats": if err := dec.Decode(&s.DynamicDateFormats); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicDateFormats", err) } case "dynamic_templates": @@ -137,25 +137,25 @@ func (s *Request) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]types.DynamicTemplate, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicTemplates", err) } s.DynamicTemplates = append(s.DynamicTemplates, o) case '[': o := make([]map[string]types.DynamicTemplate, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicTemplates", err) } s.DynamicTemplates = o } case "_field_names": if err := dec.Decode(&s.FieldNames_); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldNames_", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "numeric_detection": @@ -165,7 +165,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumericDetection", err) } s.NumericDetection = &value case bool: @@ -481,17 +481,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "_routing": if err := dec.Decode(&s.Routing_); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } case "runtime": if err := dec.Decode(&s.Runtime); err != nil { - return err + return fmt.Errorf("%s | %w", "Runtime", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } } diff --git a/typedapi/indices/putmapping/response.go b/typedapi/indices/putmapping/response.go index 52fa027851..7bd91d64ce 100644 --- a/typedapi/indices/putmapping/response.go +++ b/typedapi/indices/putmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putmapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_mapping/IndicesPutMappingResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_mapping/IndicesPutMappingResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putsettings/put_settings.go b/typedapi/indices/putsettings/put_settings.go index 26ca9dfdc3..122528ed94 100644 --- a/typedapi/indices/putsettings/put_settings.go +++ b/typedapi/indices/putsettings/put_settings.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the index settings. package putsettings @@ -745,7 +745,7 @@ func (r *PutSettings) Settings(settings *types.IndexSettings) *PutSettings { // Similarity Configure custom similarity settings to customize how search results are // scored. // API name: similarity -func (r *PutSettings) Similarity(similarity *types.SettingsSimilarity) *PutSettings { +func (r *PutSettings) Similarity(similarity map[string]types.SettingsSimilarity) *PutSettings { r.req.Similarity = similarity diff --git a/typedapi/indices/putsettings/request.go b/typedapi/indices/putsettings/request.go index 166e47cb20..f63873e587 100644 --- a/typedapi/indices/putsettings/request.go +++ b/typedapi/indices/putsettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsettings @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_settings/IndicesPutSettingsRequest.ts#L25-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_settings/IndicesPutSettingsRequest.ts#L25-L92 type Request = types.IndexSettings diff --git a/typedapi/indices/putsettings/response.go b/typedapi/indices/putsettings/response.go index 9f4f25640b..12aa1746bb 100644 --- a/typedapi/indices/putsettings/response.go +++ b/typedapi/indices/putsettings/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsettings // Response holds the response body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_settings/IndicesPutSettingsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_settings/IndicesPutSettingsResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/puttemplate/put_template.go b/typedapi/indices/puttemplate/put_template.go index 1560ff9da3..9169435712 100644 --- a/typedapi/indices/puttemplate/put_template.go +++ b/typedapi/indices/puttemplate/put_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates an index template. 
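
Besides the spec-commit bump, the one signature change in this batch is PutSettings.Similarity, which now accepts the whole map of named similarity definitions rather than a single *types.SettingsSimilarity. A hedged sketch of the new call shape, assuming an already-configured elastictransport.Interface and a sims map built elsewhere (e.g. copied from existing index settings):

```go
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/indices/putsettings"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// updateSimilarities passes the full set of named similarities in one call,
// matching the new map-based setter. tp and sims are assumptions of this
// sketch and are expected to be built elsewhere.
func updateSimilarities(ctx context.Context, tp elastictransport.Interface, sims map[string]types.SettingsSimilarity) error {
	_, err := putsettings.New(tp).
		Similarity(sims).
		Do(ctx)
	return err
}
```
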
package puttemplate diff --git a/typedapi/indices/puttemplate/request.go b/typedapi/indices/puttemplate/request.go index a750d3bf59..919de7ea2b 100644 --- a/typedapi/indices/puttemplate/request.go +++ b/typedapi/indices/puttemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttemplate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package puttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_template/IndicesPutTemplateRequest.ts#L29-L105 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_template/IndicesPutTemplateRequest.ts#L29-L105 type Request struct { // Aliases Aliases for the index. @@ -96,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aliases = make(map[string]types.Alias, 0) } if err := dec.Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } case "index_patterns": @@ -105,19 +105,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } s.IndexPatterns = append(s.IndexPatterns, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } } case "mappings": if err := dec.Decode(&s.Mappings); err != nil { - return err + return fmt.Errorf("%s | %w", "Mappings", err) } case "order": @@ -128,7 +128,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = &value case float64: @@ -141,12 +141,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Settings = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/indices/puttemplate/response.go b/typedapi/indices/puttemplate/response.go index c1d0021e03..83db588982 100644 --- a/typedapi/indices/puttemplate/response.go +++ b/typedapi/indices/puttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
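
These request decoders also keep the lenient index_patterns handling visible in the hunk above: a bare string and a JSON array both land in the same slice. A small sketch against the puttemplate.Request from this diff:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/indices/puttemplate"
)

func main() {
	var single, list puttemplate.Request

	// A bare string is appended as a one-element slice...
	_ = json.Unmarshal([]byte(`{"index_patterns": "logs-*"}`), &single)

	// ...and a JSON array decodes straight into the slice.
	_ = json.Unmarshal([]byte(`{"index_patterns": ["logs-*", "metrics-*"]}`), &list)

	fmt.Println(single.IndexPatterns) // [logs-*]
	fmt.Println(list.IndexPatterns)   // [logs-* metrics-*]
}
```
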
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttemplate // Response holds the response body struct for the package puttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_template/IndicesPutTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_template/IndicesPutTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/recovery/recovery.go b/typedapi/indices/recovery/recovery.go index 957c6755e4..521f11674e 100644 --- a/typedapi/indices/recovery/recovery.go +++ b/typedapi/indices/recovery/recovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about ongoing index shard recoveries. package recovery diff --git a/typedapi/indices/recovery/response.go b/typedapi/indices/recovery/response.go index 8bcc3d7c11..ea0703ddf5 100644 --- a/typedapi/indices/recovery/response.go +++ b/typedapi/indices/recovery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package recovery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package recovery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/IndicesRecoveryResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/IndicesRecoveryResponse.ts#L24-L26 type Response map[string]types.RecoveryStatus diff --git a/typedapi/indices/refresh/refresh.go b/typedapi/indices/refresh/refresh.go index 22d54d9df1..e4fbae191a 100644 --- a/typedapi/indices/refresh/refresh.go +++ b/typedapi/indices/refresh/refresh.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Performs the refresh operation in one or more indices. package refresh diff --git a/typedapi/indices/refresh/response.go b/typedapi/indices/refresh/response.go index c304f8d124..c71e07c9ae 100644 --- a/typedapi/indices/refresh/response.go +++ b/typedapi/indices/refresh/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package refresh @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package refresh // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/refresh/IndicesRefreshResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/refresh/IndicesRefreshResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go b/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go index 823b01fbbf..1b8d69e504 100644 --- a/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go +++ b/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Reloads an index's search analyzers and their resources. package reloadsearchanalyzers diff --git a/typedapi/indices/reloadsearchanalyzers/response.go b/typedapi/indices/reloadsearchanalyzers/response.go index 308b4b2cf5..dba08c1251 100644 --- a/typedapi/indices/reloadsearchanalyzers/response.go +++ b/typedapi/indices/reloadsearchanalyzers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reloadsearchanalyzers @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reloadsearchanalyzers // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/reload_search_analyzers/ReloadSearchAnalyzersResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/reload_search_analyzers/ReloadSearchAnalyzersResponse.ts#L22-L24 type Response struct { ReloadDetails []types.ReloadDetails `json:"reload_details"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/resolvecluster/resolve_cluster.go b/typedapi/indices/resolvecluster/resolve_cluster.go new file mode 100644 index 0000000000..a92550dfc6 --- /dev/null +++ b/typedapi/indices/resolvecluster/resolve_cluster.go @@ -0,0 +1,354 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +// Resolves the specified index expressions to return information about each +// cluster, including the local cluster, if included. +package resolvecluster + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/expandwildcard" +) + +const ( + nameMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type ResolveCluster struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + name string + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewResolveCluster type alias for index. +type NewResolveCluster func(name string) *ResolveCluster + +// NewResolveClusterFunc returns a new instance of ResolveCluster with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewResolveClusterFunc(tp elastictransport.Interface) NewResolveCluster { + return func(name string) *ResolveCluster { + n := New(tp) + + n._name(name) + + return n + } +} + +// Resolves the specified index expressions to return information about each +// cluster, including the local cluster, if included. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-cluster-api.html +func New(tp elastictransport.Interface) *ResolveCluster { + r := &ResolveCluster{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. 
+func (r *ResolveCluster) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == nameMask: + path.WriteString("/") + path.WriteString("_resolve") + path.WriteString("/") + path.WriteString("cluster") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "name", r.name) + } + path.WriteString(r.name) + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r ResolveCluster) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "indices.resolve_cluster") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "indices.resolve_cluster") + if reader := instrument.RecordRequestBody(ctx, "indices.resolve_cluster", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "indices.resolve_cluster") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the ResolveCluster query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a resolvecluster.Response +func (r ResolveCluster) Do(providedCtx context.Context) (Response, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "indices.resolve_cluster") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(&response) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + return response, nil + } + + errorResponse := 
types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if errorResponse.Status == 0 { + errorResponse.Status = res.StatusCode + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, errorResponse) + } + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r ResolveCluster) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "indices.resolve_cluster") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(ioutil.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the ResolveCluster query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the ResolveCluster headers map. +func (r *ResolveCluster) Header(key, value string) *ResolveCluster { + r.headers.Set(key, value) + + return r +} + +// Name Comma-separated name(s) or index pattern(s) of the indices, aliases, and data +// streams to resolve. +// Resources on remote clusters can be specified using the ``:`` +// syntax. +// API Name: name +func (r *ResolveCluster) _name(name string) *ResolveCluster { + r.paramSet |= nameMask + r.name = name + + return r +} + +// AllowNoIndices If false, the request returns an error if any wildcard expression, index +// alias, or _all value targets only missing +// or closed indices. This behavior applies even if the request targets other +// open indices. For example, a request +// targeting foo*,bar* returns an error if an index starts with foo but no index +// starts with bar. +// API name: allow_no_indices +func (r *ResolveCluster) AllowNoIndices(allownoindices bool) *ResolveCluster { + r.values.Set("allow_no_indices", strconv.FormatBool(allownoindices)) + + return r +} + +// ExpandWildcards Type of index that wildcard patterns can match. +// If the request can target data streams, this argument determines whether +// wildcard expressions match hidden data streams. +// Supports comma-separated values, such as `open,hidden`. +// Valid values are: `all`, `open`, `closed`, `hidden`, `none`. +// API name: expand_wildcards +func (r *ResolveCluster) ExpandWildcards(expandwildcards ...expandwildcard.ExpandWildcard) *ResolveCluster { + tmp := []string{} + for _, item := range expandwildcards { + tmp = append(tmp, item.String()) + } + r.values.Set("expand_wildcards", strings.Join(tmp, ",")) + + return r +} + +// IgnoreThrottled If true, concrete, expanded or aliased indices are ignored when frozen. +// Defaults to false. 
+// API name: ignore_throttled +func (r *ResolveCluster) IgnoreThrottled(ignorethrottled bool) *ResolveCluster { + r.values.Set("ignore_throttled", strconv.FormatBool(ignorethrottled)) + + return r +} + +// IgnoreUnavailable If false, the request returns an error if it targets a missing or closed +// index. Defaults to false. +// API name: ignore_unavailable +func (r *ResolveCluster) IgnoreUnavailable(ignoreunavailable bool) *ResolveCluster { + r.values.Set("ignore_unavailable", strconv.FormatBool(ignoreunavailable)) + + return r +} diff --git a/typedapi/indices/resolvecluster/response.go b/typedapi/indices/resolvecluster/response.go new file mode 100644 index 0000000000..442cd4656b --- /dev/null +++ b/typedapi/indices/resolvecluster/response.go @@ -0,0 +1,37 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package resolvecluster + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// Response holds the response body struct for the package resolvecluster +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/resolve_cluster/ResolveClusterResponse.ts#L24-L26 + +type Response map[string]types.ResolveClusterInfo + +// NewResponse returns a Response +func NewResponse() Response { + r := make(Response, 0) + return r +} diff --git a/typedapi/indices/resolveindex/resolve_index.go b/typedapi/indices/resolveindex/resolve_index.go index d97584d4a3..b7453dfa9c 100644 --- a/typedapi/indices/resolveindex/resolve_index.go +++ b/typedapi/indices/resolveindex/resolve_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about any matching indices, aliases, and data streams package resolveindex diff --git a/typedapi/indices/resolveindex/response.go b/typedapi/indices/resolveindex/response.go index ff33d3fc59..cc7b893f22 100644 --- a/typedapi/indices/resolveindex/response.go +++ b/typedapi/indices/resolveindex/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
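
The new resolvecluster package above wires the _resolve/cluster/{name} endpoint into the typed API, with Do returning the map-shaped Response it defines. A usage sketch built only from what this diff adds, assuming a configured elastictransport.Interface (the index expression and remote alias are illustrative):

```go
package example

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/indices/resolvecluster"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/expandwildcard"
)

// resolveClusters reports, for each cluster referenced by the expression,
// whether it is reachable and whether the expression matches anything there.
func resolveClusters(ctx context.Context, tp elastictransport.Interface) error {
	res, err := resolvecluster.NewResolveClusterFunc(tp)("my-index-*,remote1:my-index-*").
		ExpandWildcards(expandwildcard.Open).
		IgnoreUnavailable(true).
		Do(ctx)
	if err != nil {
		return err
	}

	// Response is a map keyed by cluster alias (see response.go above).
	for cluster, info := range res {
		fmt.Printf("%s: %+v\n", cluster, info)
	}
	return nil
}
```
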
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resolveindex @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resolveindex // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/resolve_index/ResolveIndexResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/resolve_index/ResolveIndexResponse.ts#L22-L28 type Response struct { Aliases []types.ResolveIndexAliasItem `json:"aliases"` DataStreams []types.ResolveIndexDataStreamsItem `json:"data_streams"` diff --git a/typedapi/indices/rollover/request.go b/typedapi/indices/rollover/request.go index c2284f060a..8a5a83e65b 100644 --- a/typedapi/indices/rollover/request.go +++ b/typedapi/indices/rollover/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rollover @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rollover // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/rollover/IndicesRolloverRequest.ts#L29-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/rollover/IndicesRolloverRequest.ts#L29-L99 type Request struct { // Aliases Aliases for the target index. diff --git a/typedapi/indices/rollover/response.go b/typedapi/indices/rollover/response.go index 6505ca0000..b0e391cea1 100644 --- a/typedapi/indices/rollover/response.go +++ b/typedapi/indices/rollover/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rollover // Response holds the response body struct for the package rollover // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/rollover/IndicesRolloverResponse.ts#L22-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/rollover/IndicesRolloverResponse.ts#L22-L32 type Response struct { Acknowledged bool `json:"acknowledged"` Conditions map[string]bool `json:"conditions"` diff --git a/typedapi/indices/rollover/rollover.go b/typedapi/indices/rollover/rollover.go index b0b9de63cf..b935147597 100644 --- a/typedapi/indices/rollover/rollover.go +++ b/typedapi/indices/rollover/rollover.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates an alias to point to a new index when the existing index // is considered to be too large or too old. diff --git a/typedapi/indices/segments/response.go b/typedapi/indices/segments/response.go index 36650cbeb6..47bf018531 100644 --- a/typedapi/indices/segments/response.go +++ b/typedapi/indices/segments/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package segments @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package segments // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/segments/IndicesSegmentsResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/segments/IndicesSegmentsResponse.ts#L24-L29 type Response struct { Indices map[string]types.IndexSegment `json:"indices"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/segments/segments.go b/typedapi/indices/segments/segments.go index b31790f626..07c03f8619 100644 --- a/typedapi/indices/segments/segments.go +++ b/typedapi/indices/segments/segments.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides low-level information about segments in a Lucene index. package segments diff --git a/typedapi/indices/shardstores/response.go b/typedapi/indices/shardstores/response.go index 21b9cdac6a..36fd62d1cc 100644 --- a/typedapi/indices/shardstores/response.go +++ b/typedapi/indices/shardstores/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package shardstores @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package shardstores // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/IndicesShardStoresResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/IndicesShardStoresResponse.ts#L24-L26 type Response struct { Indices map[string]types.IndicesShardStores `json:"indices"` } diff --git a/typedapi/indices/shardstores/shard_stores.go b/typedapi/indices/shardstores/shard_stores.go index 57ae16de93..9d2a1af8ef 100644 --- a/typedapi/indices/shardstores/shard_stores.go +++ b/typedapi/indices/shardstores/shard_stores.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides store information for shard copies of indices. package shardstores diff --git a/typedapi/indices/shrink/request.go b/typedapi/indices/shrink/request.go index 6a1a50a49d..6b10923a46 100644 --- a/typedapi/indices/shrink/request.go +++ b/typedapi/indices/shrink/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package shrink @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package shrink // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shrink/IndicesShrinkRequest.ts#L27-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shrink/IndicesShrinkRequest.ts#L27-L75 type Request struct { // Aliases The key is the alias name. diff --git a/typedapi/indices/shrink/response.go b/typedapi/indices/shrink/response.go index b15708d0fe..7e39bdb1ec 100644 --- a/typedapi/indices/shrink/response.go +++ b/typedapi/indices/shrink/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package shrink // Response holds the response body struct for the package shrink // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shrink/IndicesShrinkResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shrink/IndicesShrinkResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/shrink/shrink.go b/typedapi/indices/shrink/shrink.go index dbd18de39c..961288d2b4 100644 --- a/typedapi/indices/shrink/shrink.go +++ b/typedapi/indices/shrink/shrink.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allow to shrink an existing index into a new index with fewer primary shards. package shrink diff --git a/typedapi/indices/simulateindextemplate/request.go b/typedapi/indices/simulateindextemplate/request.go index 43da4f456e..e27d54af0f 100644 --- a/typedapi/indices/simulateindextemplate/request.go +++ b/typedapi/indices/simulateindextemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package simulateindextemplate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package simulateindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateRequest.ts#L33-L115 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateRequest.ts#L33-L115 type Request struct { // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster @@ -115,7 +115,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowAutoCreate", err) } s.AllowAutoCreate = &value case bool: @@ -124,12 +124,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "composed_of": if err := dec.Decode(&s.ComposedOf); err != nil { - return err + return fmt.Errorf("%s | %w", "ComposedOf", err) } case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "index_patterns": @@ -138,19 +138,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } s.IndexPatterns = append(s.IndexPatterns, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "priority": @@ -161,7 +161,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } s.Priority = &value case float64: @@ -171,12 +171,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/indices/simulateindextemplate/response.go b/typedapi/indices/simulateindextemplate/response.go index b1b8a618cd..5a2c4aff58 100644 --- a/typedapi/indices/simulateindextemplate/response.go +++ b/typedapi/indices/simulateindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package simulateindextemplate // Response holds the response body struct for the package simulateindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/indices/simulateindextemplate/simulate_index_template.go b/typedapi/indices/simulateindextemplate/simulate_index_template.go index ab63beb8e3..98b6c81da2 100644 --- a/typedapi/indices/simulateindextemplate/simulate_index_template.go +++ b/typedapi/indices/simulateindextemplate/simulate_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Simulate matching the given index name against the index templates in the // system diff --git a/typedapi/indices/simulatetemplate/request.go b/typedapi/indices/simulatetemplate/request.go index 9ce8514f33..183c792b03 100644 --- a/typedapi/indices/simulatetemplate/request.go +++ b/typedapi/indices/simulatetemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package simulatetemplate @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package simulatetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/simulate_template/IndicesSimulateTemplateRequest.ts#L25-L61 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/simulate_template/IndicesSimulateTemplateRequest.ts#L25-L61 type Request = types.IndexTemplate diff --git a/typedapi/indices/simulatetemplate/response.go b/typedapi/indices/simulatetemplate/response.go index 7c5ee21a4a..5b8027e9ee 100644 --- a/typedapi/indices/simulatetemplate/response.go +++ b/typedapi/indices/simulatetemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package simulatetemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package simulatetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L26-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L26-L31 type Response struct { Overlapping []types.Overlapping `json:"overlapping,omitempty"` Template types.Template `json:"template"` diff --git a/typedapi/indices/simulatetemplate/simulate_template.go b/typedapi/indices/simulatetemplate/simulate_template.go index e71c56bbe8..b797bf5a0b 100644 --- a/typedapi/indices/simulatetemplate/simulate_template.go +++ b/typedapi/indices/simulatetemplate/simulate_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Simulate resolving the given template name or body package simulatetemplate diff --git a/typedapi/indices/split/request.go b/typedapi/indices/split/request.go index 21c04ecb72..fda3e65a6e 100644 --- a/typedapi/indices/split/request.go +++ b/typedapi/indices/split/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package split @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package split // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/split/IndicesSplitRequest.ts#L27-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/split/IndicesSplitRequest.ts#L27-L74 type Request struct { // Aliases Aliases for the resulting index. diff --git a/typedapi/indices/split/response.go b/typedapi/indices/split/response.go index c2fbfb08c0..5016a701e2 100644 --- a/typedapi/indices/split/response.go +++ b/typedapi/indices/split/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package split // Response holds the response body struct for the package split // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/split/IndicesSplitResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/split/IndicesSplitResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/split/split.go b/typedapi/indices/split/split.go index d064592888..d080fc4bbc 100644 --- a/typedapi/indices/split/split.go +++ b/typedapi/indices/split/split.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows you to split an existing index into a new index with more primary // shards. diff --git a/typedapi/indices/stats/response.go b/typedapi/indices/stats/response.go index f32354be45..ee4bf411c6 100644 --- a/typedapi/indices/stats/response.go +++ b/typedapi/indices/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/IndicesStatsResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/IndicesStatsResponse.ts#L24-L30 type Response struct { All_ types.IndicesStats `json:"_all"` Indices map[string]types.IndicesStats `json:"indices,omitempty"` diff --git a/typedapi/indices/stats/stats.go b/typedapi/indices/stats/stats.go index c1058e88be..204fd5d262 100644 --- a/typedapi/indices/stats/stats.go +++ b/typedapi/indices/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Provides statistics on operations happening in an index. package stats diff --git a/typedapi/indices/unfreeze/response.go b/typedapi/indices/unfreeze/response.go index 220ca2f5e0..c3a1843f28 100644 --- a/typedapi/indices/unfreeze/response.go +++ b/typedapi/indices/unfreeze/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package unfreeze // Response holds the response body struct for the package unfreeze // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/unfreeze/IndicesUnfreezeResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/unfreeze/IndicesUnfreezeResponse.ts#L20-L25 type Response struct { Acknowledged bool `json:"acknowledged"` ShardsAcknowledged bool `json:"shards_acknowledged"` diff --git a/typedapi/indices/unfreeze/unfreeze.go b/typedapi/indices/unfreeze/unfreeze.go index b56d48d400..494d675330 100644 --- a/typedapi/indices/unfreeze/unfreeze.go +++ b/typedapi/indices/unfreeze/unfreeze.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Unfreezes an index. When a frozen index is unfrozen, the index goes through // the normal recovery process and becomes writeable again. diff --git a/typedapi/indices/updatealiases/request.go b/typedapi/indices/updatealiases/request.go index 8c2aab77f8..5d8f98988c 100644 --- a/typedapi/indices/updatealiases/request.go +++ b/typedapi/indices/updatealiases/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatealiases @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package updatealiases // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/update_aliases/IndicesUpdateAliasesRequest.ts#L24-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/update_aliases/IndicesUpdateAliasesRequest.ts#L24-L51 type Request struct { // Actions Actions to perform. diff --git a/typedapi/indices/updatealiases/response.go b/typedapi/indices/updatealiases/response.go index 00a0022345..ce4b46265a 100644 --- a/typedapi/indices/updatealiases/response.go +++ b/typedapi/indices/updatealiases/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatealiases // Response holds the response body struct for the package updatealiases // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/update_aliases/IndicesUpdateAliasesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/update_aliases/IndicesUpdateAliasesResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/updatealiases/update_aliases.go b/typedapi/indices/updatealiases/update_aliases.go index cf66b965f5..74d3fd9dd5 100644 --- a/typedapi/indices/updatealiases/update_aliases.go +++ b/typedapi/indices/updatealiases/update_aliases.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates index aliases. package updatealiases diff --git a/typedapi/indices/validatequery/request.go b/typedapi/indices/validatequery/request.go index d4d888fe50..2a15cfb3d0 100644 --- a/typedapi/indices/validatequery/request.go +++ b/typedapi/indices/validatequery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package validatequery @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package validatequery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/validate_query/IndicesValidateQueryRequest.ts#L25-L111 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/validate_query/IndicesValidateQueryRequest.ts#L25-L111 type Request struct { // Query Query in the Lucene query string syntax. diff --git a/typedapi/indices/validatequery/response.go b/typedapi/indices/validatequery/response.go index 65f5ef0dd8..5bf8eff1b5 100644 --- a/typedapi/indices/validatequery/response.go +++ b/typedapi/indices/validatequery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package validatequery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package validatequery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L23-L30 type Response struct { Error *string `json:"error,omitempty"` Explanations []types.IndicesValidationExplanation `json:"explanations,omitempty"` diff --git a/typedapi/indices/validatequery/validate_query.go b/typedapi/indices/validatequery/validate_query.go index e327300820..3c91c85546 100644 --- a/typedapi/indices/validatequery/validate_query.go +++ b/typedapi/indices/validatequery/validate_query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows a user to validate a potentially expensive query without executing it. package validatequery diff --git a/typedapi/inference/deletemodel/delete_model.go b/typedapi/inference/deletemodel/delete_model.go index d0664c2daa..b9f6fa4dbd 100644 --- a/typedapi/inference/deletemodel/delete_model.go +++ b/typedapi/inference/deletemodel/delete_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Delete model in the Inference API package deletemodel @@ -39,7 +39,7 @@ import ( const ( tasktypeMask = iota + 1 - modelidMask + inferenceidMask ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -56,8 +56,8 @@ type DeleteModel struct { paramSet int - tasktype string - modelid string + tasktype string + inferenceid string spanStarted bool @@ -65,17 +65,15 @@ type DeleteModel struct { } // NewDeleteModel type alias for index. -type NewDeleteModel func(tasktype, modelid string) *DeleteModel +type NewDeleteModel func(inferenceid string) *DeleteModel // NewDeleteModelFunc returns a new instance of DeleteModel with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. 
func NewDeleteModelFunc(tp elastictransport.Interface) NewDeleteModel { - return func(tasktype, modelid string) *DeleteModel { + return func(inferenceid string) *DeleteModel { n := New(tp) - n._tasktype(tasktype) - - n._modelid(modelid) + n._inferenceid(inferenceid) return n } @@ -112,7 +110,18 @@ func (r *DeleteModel) HttpRequest(ctx context.Context) (*http.Request, error) { r.path.Scheme = "http" switch { - case r.paramSet == tasktypeMask|modelidMask: + case r.paramSet == inferenceidMask: + path.WriteString("/") + path.WriteString("_inference") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) + } + path.WriteString(r.inferenceid) + + method = http.MethodDelete + case r.paramSet == tasktypeMask|inferenceidMask: path.WriteString("/") path.WriteString("_inference") path.WriteString("/") @@ -124,9 +133,9 @@ func (r *DeleteModel) HttpRequest(ctx context.Context) (*http.Request, error) { path.WriteString("/") if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.RecordPathPart(ctx, "modelid", r.modelid) + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) } - path.WriteString(r.modelid) + path.WriteString(r.inferenceid) method = http.MethodDelete } @@ -299,20 +308,20 @@ func (r *DeleteModel) Header(key, value string) *DeleteModel { return r } -// TaskType The model task type +// TaskType The task type // API Name: tasktype -func (r *DeleteModel) _tasktype(tasktype string) *DeleteModel { +func (r *DeleteModel) TaskType(tasktype string) *DeleteModel { r.paramSet |= tasktypeMask r.tasktype = tasktype return r } -// ModelId The unique identifier of the inference model. -// API Name: modelid -func (r *DeleteModel) _modelid(modelid string) *DeleteModel { - r.paramSet |= modelidMask - r.modelid = modelid +// InferenceId The inference Id +// API Name: inferenceid +func (r *DeleteModel) _inferenceid(inferenceid string) *DeleteModel { + r.paramSet |= inferenceidMask + r.inferenceid = inferenceid return r } diff --git a/typedapi/inference/deletemodel/response.go b/typedapi/inference/deletemodel/response.go index 0aab0836a1..70301f533f 100644 --- a/typedapi/inference/deletemodel/response.go +++ b/typedapi/inference/deletemodel/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletemodel // Response holds the response body struct for the package deletemodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/delete_model/DeleteModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/delete_model/DeleteModelResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/inference/getmodel/get_model.go b/typedapi/inference/getmodel/get_model.go index 56c893e0f7..5c487d74f0 100644 --- a/typedapi/inference/getmodel/get_model.go +++ b/typedapi/inference/getmodel/get_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
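The hunk above changes the DeleteModel constructor from (tasktype, modelid) to a single inference id, with TaskType promoted to an optional public setter. A minimal sketch of the new call shape, using only methods shown in this diff; the transport value and the endpoint/task-type ids are assumed or illustrative:

package main

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/inference/deletemodel"
)

func main() {
	var tp elastictransport.Interface // assumed: an already configured transport

	// Only the inference id is required now; the task type moved to an optional setter.
	del := deletemodel.NewDeleteModelFunc(tp)("my-elser-endpoint").
		TaskType("sparse_embedding")

	if req, err := del.HttpRequest(context.Background()); err == nil {
		fmt.Println(req.Method, req.URL.Path) // DELETE /_inference/sparse_embedding/my-elser-endpoint
	}
}

Dropping the TaskType call would instead build DELETE /_inference/my-elser-endpoint, the new single-segment form added by this change.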
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Get a model in the Inference API package getmodel @@ -39,7 +39,7 @@ import ( const ( tasktypeMask = iota + 1 - modelidMask + inferenceidMask ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -56,8 +56,8 @@ type GetModel struct { paramSet int - tasktype string - modelid string + tasktype string + inferenceid string spanStarted bool @@ -65,17 +65,15 @@ type GetModel struct { } // NewGetModel type alias for index. -type NewGetModel func(tasktype, modelid string) *GetModel +type NewGetModel func(inferenceid string) *GetModel // NewGetModelFunc returns a new instance of GetModel with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. func NewGetModelFunc(tp elastictransport.Interface) NewGetModel { - return func(tasktype, modelid string) *GetModel { + return func(inferenceid string) *GetModel { n := New(tp) - n._tasktype(tasktype) - - n._modelid(modelid) + n._inferenceid(inferenceid) return n } @@ -112,7 +110,18 @@ func (r *GetModel) HttpRequest(ctx context.Context) (*http.Request, error) { r.path.Scheme = "http" switch { - case r.paramSet == tasktypeMask|modelidMask: + case r.paramSet == inferenceidMask: + path.WriteString("/") + path.WriteString("_inference") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) + } + path.WriteString(r.inferenceid) + + method = http.MethodGet + case r.paramSet == tasktypeMask|inferenceidMask: path.WriteString("/") path.WriteString("_inference") path.WriteString("/") @@ -124,9 +133,9 @@ func (r *GetModel) HttpRequest(ctx context.Context) (*http.Request, error) { path.WriteString("/") if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.RecordPathPart(ctx, "modelid", r.modelid) + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) } - path.WriteString(r.modelid) + path.WriteString(r.inferenceid) method = http.MethodGet } @@ -299,20 +308,20 @@ func (r *GetModel) Header(key, value string) *GetModel { return r } -// TaskType The model task type +// TaskType The task type // API Name: tasktype -func (r *GetModel) _tasktype(tasktype string) *GetModel { +func (r *GetModel) TaskType(tasktype string) *GetModel { r.paramSet |= tasktypeMask r.tasktype = tasktype return r } -// ModelId The unique identifier of the inference model. -// API Name: modelid -func (r *GetModel) _modelid(modelid string) *GetModel { - r.paramSet |= modelidMask - r.modelid = modelid +// InferenceId The inference Id +// API Name: inferenceid +func (r *GetModel) _inferenceid(inferenceid string) *GetModel { + r.paramSet |= inferenceidMask + r.inferenceid = inferenceid return r } diff --git a/typedapi/inference/getmodel/response.go b/typedapi/inference/getmodel/response.go index fa169b81da..7b0979bb8a 100644 --- a/typedapi/inference/getmodel/response.go +++ b/typedapi/inference/getmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getmodel @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/get_model/GetModelResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/get_model/GetModelResponse.ts#L22-L26 type Response struct { Models []types.ModelConfigContainer `json:"models"` } diff --git a/typedapi/inference/inference/inference.go b/typedapi/inference/inference/inference.go index d5c9760c22..e47cd8c9cd 100644 --- a/typedapi/inference/inference/inference.go +++ b/typedapi/inference/inference/inference.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Perform inference on a model package inference @@ -39,7 +39,7 @@ import ( const ( tasktypeMask = iota + 1 - modelidMask + inferenceidMask ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -60,8 +60,8 @@ type Inference struct { paramSet int - tasktype string - modelid string + tasktype string + inferenceid string spanStarted bool @@ -69,17 +69,15 @@ type Inference struct { } // NewInference type alias for index. -type NewInference func(tasktype, modelid string) *Inference +type NewInference func(inferenceid string) *Inference // NewInferenceFunc returns a new instance of Inference with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. 
func NewInferenceFunc(tp elastictransport.Interface) NewInference { - return func(tasktype, modelid string) *Inference { + return func(inferenceid string) *Inference { n := New(tp) - n._tasktype(tasktype) - - n._modelid(modelid) + n._inferenceid(inferenceid) return n } @@ -160,7 +158,18 @@ func (r *Inference) HttpRequest(ctx context.Context) (*http.Request, error) { r.path.Scheme = "http" switch { - case r.paramSet == tasktypeMask|modelidMask: + case r.paramSet == inferenceidMask: + path.WriteString("/") + path.WriteString("_inference") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) + } + path.WriteString(r.inferenceid) + + method = http.MethodPost + case r.paramSet == tasktypeMask|inferenceidMask: path.WriteString("/") path.WriteString("_inference") path.WriteString("/") @@ -172,9 +181,9 @@ func (r *Inference) HttpRequest(ctx context.Context) (*http.Request, error) { path.WriteString("/") if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.RecordPathPart(ctx, "modelid", r.modelid) + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) } - path.WriteString(r.modelid) + path.WriteString(r.inferenceid) method = http.MethodPost } @@ -194,6 +203,12 @@ func (r *Inference) HttpRequest(ctx context.Context) (*http.Request, error) { req.Header = r.headers.Clone() + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + if req.Header.Get("Accept") == "" { req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") } @@ -308,20 +323,20 @@ func (r *Inference) Header(key, value string) *Inference { return r } -// TaskType The model task type +// TaskType The task type // API Name: tasktype -func (r *Inference) _tasktype(tasktype string) *Inference { +func (r *Inference) TaskType(tasktype string) *Inference { r.paramSet |= tasktypeMask r.tasktype = tasktype return r } -// ModelId The unique identifier of the inference model. -// API Name: modelid -func (r *Inference) _modelid(modelid string) *Inference { - r.paramSet |= modelidMask - r.modelid = modelid +// InferenceId The inference Id +// API Name: inferenceid +func (r *Inference) _inferenceid(inferenceid string) *Inference { + r.paramSet |= inferenceidMask + r.inferenceid = inferenceid return r } diff --git a/typedapi/inference/inference/request.go b/typedapi/inference/inference/request.go index cda6035d7f..24834602a0 100644 --- a/typedapi/inference/inference/request.go +++ b/typedapi/inference/inference/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package inference @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package inference // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/inference/InferenceRequest.ts#L25-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/inference/InferenceRequest.ts#L25-L53 type Request struct { // Input Text input to the model. 
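The new switch above means the generated Inference request can target either /_inference/{inferenceid} or /_inference/{tasktype}/{inferenceid}. An illustrative standalone restatement of that routing rule (a sketch, not library code):

package main

import "fmt"

// inferencePath mirrors the route selection added above: an inference id alone
// maps to /_inference/{inferenceid}; adding a task type maps to
// /_inference/{tasktype}/{inferenceid}.
func inferencePath(taskType, inferenceID string) string {
	if taskType == "" {
		return "/_inference/" + inferenceID
	}
	return "/_inference/" + taskType + "/" + inferenceID
}

func main() {
	fmt.Println(inferencePath("", "my-endpoint"))               // /_inference/my-endpoint
	fmt.Println(inferencePath("text_embedding", "my-endpoint")) // /_inference/text_embedding/my-endpoint
}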
@@ -78,19 +78,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } s.Input = append(s.Input, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Input); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } } case "task_settings": if err := dec.Decode(&s.TaskSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskSettings", err) } } diff --git a/typedapi/inference/inference/response.go b/typedapi/inference/inference/response.go index aa4f60c779..8e389e6009 100644 --- a/typedapi/inference/inference/response.go +++ b/typedapi/inference/inference/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package inference @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package inference // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/inference/InferenceResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/inference/InferenceResponse.ts#L22-L24 type Response struct { SparseEmbedding []types.SparseEmbeddingResult `json:"sparse_embedding,omitempty"` TextEmbedding []types.TextEmbeddingResult `json:"text_embedding,omitempty"` diff --git a/typedapi/inference/putmodel/put_model.go b/typedapi/inference/putmodel/put_model.go index 0358e3f3bb..03e269d5d5 100644 --- a/typedapi/inference/putmodel/put_model.go +++ b/typedapi/inference/putmodel/put_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Configure a model for use in the Inference API package putmodel @@ -39,7 +39,7 @@ import ( const ( tasktypeMask = iota + 1 - modelidMask + inferenceidMask ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -60,8 +60,8 @@ type PutModel struct { paramSet int - tasktype string - modelid string + tasktype string + inferenceid string spanStarted bool @@ -69,17 +69,15 @@ type PutModel struct { } // NewPutModel type alias for index. -type NewPutModel func(tasktype, modelid string) *PutModel +type NewPutModel func(inferenceid string) *PutModel // NewPutModelFunc returns a new instance of PutModel with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. 
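The decoder hunks above wrap each field-level failure with the field name via fmt.Errorf("%s | %w", ...), so callers can see which property of the body broke while errors.Is/errors.As still reach the underlying error. A quick sketch against the generated inference Request; the payload is deliberately invalid and the printed message is indicative only:

package main

import (
	"encoding/json"
	"fmt"

	infer "github.com/elastic/go-elasticsearch/v8/typedapi/inference/inference"
)

func main() {
	var req infer.Request

	// "input" must be a string or an array of strings; a number forces a field-level failure.
	err := json.Unmarshal([]byte(`{"input": 42}`), &req)
	fmt.Println(err) // e.g. Input | json: cannot unmarshal number into Go value of type string
}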
func NewPutModelFunc(tp elastictransport.Interface) NewPutModel { - return func(tasktype, modelid string) *PutModel { + return func(inferenceid string) *PutModel { n := New(tp) - n._tasktype(tasktype) - - n._modelid(modelid) + n._inferenceid(inferenceid) return n } @@ -158,7 +156,18 @@ func (r *PutModel) HttpRequest(ctx context.Context) (*http.Request, error) { r.path.Scheme = "http" switch { - case r.paramSet == tasktypeMask|modelidMask: + case r.paramSet == inferenceidMask: + path.WriteString("/") + path.WriteString("_inference") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) + } + path.WriteString(r.inferenceid) + + method = http.MethodPut + case r.paramSet == tasktypeMask|inferenceidMask: path.WriteString("/") path.WriteString("_inference") path.WriteString("/") @@ -170,9 +179,9 @@ func (r *PutModel) HttpRequest(ctx context.Context) (*http.Request, error) { path.WriteString("/") if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.RecordPathPart(ctx, "modelid", r.modelid) + instrument.RecordPathPart(ctx, "inferenceid", r.inferenceid) } - path.WriteString(r.modelid) + path.WriteString(r.inferenceid) method = http.MethodPut } @@ -192,6 +201,12 @@ func (r *PutModel) HttpRequest(ctx context.Context) (*http.Request, error) { req.Header = r.headers.Clone() + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + if req.Header.Get("Accept") == "" { req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") } @@ -306,20 +321,20 @@ func (r *PutModel) Header(key, value string) *PutModel { return r } -// TaskType The model task type +// TaskType The task type // API Name: tasktype -func (r *PutModel) _tasktype(tasktype string) *PutModel { +func (r *PutModel) TaskType(tasktype string) *PutModel { r.paramSet |= tasktypeMask r.tasktype = tasktype return r } -// ModelId The unique identifier of the inference model. -// API Name: modelid -func (r *PutModel) _modelid(modelid string) *PutModel { - r.paramSet |= modelidMask - r.modelid = modelid +// InferenceId The inference Id +// API Name: inferenceid +func (r *PutModel) _inferenceid(inferenceid string) *PutModel { + r.paramSet |= inferenceidMask + r.inferenceid = inferenceid return r } diff --git a/typedapi/inference/putmodel/request.go b/typedapi/inference/putmodel/request.go index 0534358684..27b0ecf2e7 100644 --- a/typedapi/inference/putmodel/request.go +++ b/typedapi/inference/putmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
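The PutModel hunk above also defaults Content-Type to the compatibility media type, but only when a raw body is present and the caller has not already set the header. A sketch of both cases, assuming the builder's usual Raw(io.Reader) helper (not shown in this diff) and an illustrative endpoint id and body:

package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/inference/putmodel"
)

func main() {
	var tp elastictransport.Interface // assumed: an already configured transport

	// With a raw body attached and no explicit header, the generated HttpRequest
	// now fills in the compatibility Content-Type.
	put := putmodel.NewPutModelFunc(tp)("my-endpoint").
		Raw(strings.NewReader(`{"service":"elser","service_settings":{}}`)) // illustrative body
	if req, err := put.HttpRequest(context.Background()); err == nil {
		fmt.Println(req.Header.Get("Content-Type")) // application/vnd.elasticsearch+json;compatible-with=8
	}

	// A Content-Type set explicitly via Header(...) is left untouched by the new default.
	custom := putmodel.NewPutModelFunc(tp)("my-endpoint").
		Raw(strings.NewReader(`{}`)).
		Header("Content-Type", "application/json")
	if req, err := custom.HttpRequest(context.Background()); err == nil {
		fmt.Println(req.Header.Get("Content-Type")) // application/json
	}
}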
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putmodel @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package putmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/put_model/PutModelRequest.ts#L25-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/put_model/PutModelRequest.ts#L25-L44 type Request = types.ModelConfig diff --git a/typedapi/inference/putmodel/response.go b/typedapi/inference/putmodel/response.go index bb89ce6fbd..1d94d8a7d9 100644 --- a/typedapi/inference/putmodel/response.go +++ b/typedapi/inference/putmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putmodel @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package putmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/put_model/PutModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/put_model/PutModelResponse.ts#L22-L24 type Response struct { // ModelId The model Id diff --git a/typedapi/ingest/deletepipeline/delete_pipeline.go b/typedapi/ingest/deletepipeline/delete_pipeline.go index 1865cae38c..1be10a4c58 100644 --- a/typedapi/ingest/deletepipeline/delete_pipeline.go +++ b/typedapi/ingest/deletepipeline/delete_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a pipeline. package deletepipeline diff --git a/typedapi/ingest/deletepipeline/response.go b/typedapi/ingest/deletepipeline/response.go index acf58701db..dbef9cb9bf 100644 --- a/typedapi/ingest/deletepipeline/response.go +++ b/typedapi/ingest/deletepipeline/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletepipeline // Response holds the response body struct for the package deletepipeline // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/delete_pipeline/DeletePipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/delete_pipeline/DeletePipelineResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ingest/geoipstats/geo_ip_stats.go b/typedapi/ingest/geoipstats/geo_ip_stats.go index 588c959176..b832f9d8bf 100644 --- a/typedapi/ingest/geoipstats/geo_ip_stats.go +++ b/typedapi/ingest/geoipstats/geo_ip_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns statistical information about geoip databases package geoipstats diff --git a/typedapi/ingest/geoipstats/response.go b/typedapi/ingest/geoipstats/response.go index acc06f394b..6baad7da4e 100644 --- a/typedapi/ingest/geoipstats/response.go +++ b/typedapi/ingest/geoipstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package geoipstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package geoipstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/geo_ip_stats/IngestGeoIpStatsResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/geo_ip_stats/IngestGeoIpStatsResponse.ts#L24-L31 type Response struct { // Nodes Downloaded GeoIP2 databases for each node. diff --git a/typedapi/ingest/getpipeline/get_pipeline.go b/typedapi/ingest/getpipeline/get_pipeline.go index e0fc550426..ddc6c84e07 100644 --- a/typedapi/ingest/getpipeline/get_pipeline.go +++ b/typedapi/ingest/getpipeline/get_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a pipeline. package getpipeline diff --git a/typedapi/ingest/getpipeline/response.go b/typedapi/ingest/getpipeline/response.go index 983220129b..93b485b8fd 100644 --- a/typedapi/ingest/getpipeline/response.go +++ b/typedapi/ingest/getpipeline/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getpipeline @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/get_pipeline/GetPipelineResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/get_pipeline/GetPipelineResponse.ts#L23-L25 type Response map[string]types.IngestPipeline diff --git a/typedapi/ingest/processorgrok/processor_grok.go b/typedapi/ingest/processorgrok/processor_grok.go index d6c28850a4..818028cd21 100644 --- a/typedapi/ingest/processorgrok/processor_grok.go +++ b/typedapi/ingest/processorgrok/processor_grok.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a list of the built-in patterns. package processorgrok diff --git a/typedapi/ingest/processorgrok/response.go b/typedapi/ingest/processorgrok/response.go index 5f265ee29a..9a2e3c48fb 100644 --- a/typedapi/ingest/processorgrok/response.go +++ b/typedapi/ingest/processorgrok/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package processorgrok // Response holds the response body struct for the package processorgrok // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/processor_grok/GrokProcessorPatternsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/processor_grok/GrokProcessorPatternsResponse.ts#L22-L24 type Response struct { Patterns map[string]string `json:"patterns"` } diff --git a/typedapi/ingest/putpipeline/put_pipeline.go b/typedapi/ingest/putpipeline/put_pipeline.go index 6e29e388f4..943d127a49 100644 --- a/typedapi/ingest/putpipeline/put_pipeline.go +++ b/typedapi/ingest/putpipeline/put_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a pipeline. package putpipeline diff --git a/typedapi/ingest/putpipeline/request.go b/typedapi/ingest/putpipeline/request.go index 70e6713b92..5b521648bd 100644 --- a/typedapi/ingest/putpipeline/request.go +++ b/typedapi/ingest/putpipeline/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putpipeline @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/put_pipeline/PutPipelineRequest.ts#L25-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/put_pipeline/PutPipelineRequest.ts#L25-L77 type Request struct { // Description Description of the ingest pipeline. @@ -92,7 +92,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,22 +103,22 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "processors": if err := dec.Decode(&s.Processors); err != nil { - return err + return fmt.Errorf("%s | %w", "Processors", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/ingest/putpipeline/response.go b/typedapi/ingest/putpipeline/response.go index 5011229808..92f2dd4091 100644 --- a/typedapi/ingest/putpipeline/response.go +++ b/typedapi/ingest/putpipeline/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putpipeline // Response holds the response body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/put_pipeline/PutPipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/put_pipeline/PutPipelineResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ingest/simulate/request.go b/typedapi/ingest/simulate/request.go index 59b4633908..8f3713e0cf 100644 --- a/typedapi/ingest/simulate/request.go +++ b/typedapi/ingest/simulate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package simulate @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package simulate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/simulate/SimulatePipelineRequest.ts#L25-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/simulate/SimulatePipelineRequest.ts#L25-L57 type Request struct { // Docs Sample documents to test in the pipeline. diff --git a/typedapi/ingest/simulate/response.go b/typedapi/ingest/simulate/response.go index 3496cd864d..66fbab9104 100644 --- a/typedapi/ingest/simulate/response.go +++ b/typedapi/ingest/simulate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package simulate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package simulate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/simulate/SimulatePipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/simulate/SimulatePipelineResponse.ts#L22-L24 type Response struct { Docs []types.PipelineSimulation `json:"docs"` } diff --git a/typedapi/ingest/simulate/simulate.go b/typedapi/ingest/simulate/simulate.go index 2415e0442e..840932d0fe 100644 --- a/typedapi/ingest/simulate/simulate.go +++ b/typedapi/ingest/simulate/simulate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows to simulate a pipeline with example documents. package simulate diff --git a/typedapi/license/delete/delete.go b/typedapi/license/delete/delete.go index 133020a985..8b5d8c903a 100644 --- a/typedapi/license/delete/delete.go +++ b/typedapi/license/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes licensing information for the cluster package delete diff --git a/typedapi/license/delete/response.go b/typedapi/license/delete/response.go index 1d80ab31d0..9bd9cb928a 100644 --- a/typedapi/license/delete/response.go +++ b/typedapi/license/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/delete/DeleteLicenseResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/delete/DeleteLicenseResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/license/get/get.go b/typedapi/license/get/get.go index 8b176e9b1b..72463009d0 100644 --- a/typedapi/license/get/get.go +++ b/typedapi/license/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves licensing information for the cluster package get diff --git a/typedapi/license/get/response.go b/typedapi/license/get/response.go index f02d945b59..e32aab8007 100644 --- a/typedapi/license/get/response.go +++ b/typedapi/license/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/get/GetLicenseResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/get/GetLicenseResponse.ts#L22-L24 type Response struct { License types.LicenseInformation `json:"license"` } diff --git a/typedapi/license/getbasicstatus/get_basic_status.go b/typedapi/license/getbasicstatus/get_basic_status.go index 37e873ef36..03fd09fa9f 100644 --- a/typedapi/license/getbasicstatus/get_basic_status.go +++ b/typedapi/license/getbasicstatus/get_basic_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the status of the basic license. package getbasicstatus diff --git a/typedapi/license/getbasicstatus/response.go b/typedapi/license/getbasicstatus/response.go index fe87e8200c..4ab0914e2c 100644 --- a/typedapi/license/getbasicstatus/response.go +++ b/typedapi/license/getbasicstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getbasicstatus // Response holds the response body struct for the package getbasicstatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/get_basic_status/GetBasicLicenseStatusResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/get_basic_status/GetBasicLicenseStatusResponse.ts#L20-L22 type Response struct { EligibleToStartBasic bool `json:"eligible_to_start_basic"` } diff --git a/typedapi/license/gettrialstatus/get_trial_status.go b/typedapi/license/gettrialstatus/get_trial_status.go index 5dddd4213c..bc1beae651 100644 --- a/typedapi/license/gettrialstatus/get_trial_status.go +++ b/typedapi/license/gettrialstatus/get_trial_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the status of the trial license. package gettrialstatus diff --git a/typedapi/license/gettrialstatus/response.go b/typedapi/license/gettrialstatus/response.go index 8dd7419ccc..4a8d6573d9 100644 --- a/typedapi/license/gettrialstatus/response.go +++ b/typedapi/license/gettrialstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettrialstatus // Response holds the response body struct for the package gettrialstatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/get_trial_status/GetTrialLicenseStatusResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/get_trial_status/GetTrialLicenseStatusResponse.ts#L20-L22 type Response struct { EligibleToStartTrial bool `json:"eligible_to_start_trial"` } diff --git a/typedapi/license/post/post.go b/typedapi/license/post/post.go index aa67d29d7c..30f6f491e2 100644 --- a/typedapi/license/post/post.go +++ b/typedapi/license/post/post.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the license for the cluster. package post diff --git a/typedapi/license/post/request.go b/typedapi/license/post/request.go index 065d27ad72..02252446ff 100644 --- a/typedapi/license/post/request.go +++ b/typedapi/license/post/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package post @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package post // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/post/PostLicenseRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/post/PostLicenseRequest.ts#L23-L43 type Request struct { License *types.License `json:"license,omitempty"` // Licenses A sequence of one or more JSON documents containing the license information. diff --git a/typedapi/license/post/response.go b/typedapi/license/post/response.go index 8e3e9c9020..0d25450862 100644 --- a/typedapi/license/post/response.go +++ b/typedapi/license/post/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package post @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package post // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/post/PostLicenseResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/post/PostLicenseResponse.ts#L23-L29 type Response struct { Acknowledge *types.Acknowledgement `json:"acknowledge,omitempty"` Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/license/poststartbasic/post_start_basic.go b/typedapi/license/poststartbasic/post_start_basic.go index 968d38604e..9392b9fb4f 100644 --- a/typedapi/license/poststartbasic/post_start_basic.go +++ b/typedapi/license/poststartbasic/post_start_basic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Starts an indefinite basic license. package poststartbasic diff --git a/typedapi/license/poststartbasic/response.go b/typedapi/license/poststartbasic/response.go index 399ac808c6..ffaa866db5 100644 --- a/typedapi/license/poststartbasic/response.go +++ b/typedapi/license/poststartbasic/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package poststartbasic @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package poststartbasic // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/post_start_basic/StartBasicLicenseResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/post_start_basic/StartBasicLicenseResponse.ts#L23-L31 type Response struct { Acknowledge map[string][]string `json:"acknowledge,omitempty"` Acknowledged bool `json:"acknowledged"` @@ -75,14 +76,14 @@ func (s *Response) UnmarshalJSON(data []byte) error { o := new(string) err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "Acknowledge", err) } s.Acknowledge[key] = append(s.Acknowledge[key], *o) default: o := []string{} err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "Acknowledge", err) } s.Acknowledge[key] = o } @@ -95,7 +96,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Acknowledged", err) } s.Acknowledged = value case bool: @@ -109,7 +110,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BasicWasStarted", err) } s.BasicWasStarted = value case bool: @@ -119,7 +120,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "error_message": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ErrorMessage", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/license/poststarttrial/post_start_trial.go b/typedapi/license/poststarttrial/post_start_trial.go index de6d6cc8e0..0126c8dca5 100644 --- a/typedapi/license/poststarttrial/post_start_trial.go +++ b/typedapi/license/poststarttrial/post_start_trial.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // starts a limited time trial license. package poststarttrial diff --git a/typedapi/license/poststarttrial/response.go b/typedapi/license/poststarttrial/response.go index 320bcb0a2e..94dfd39464 100644 --- a/typedapi/license/poststarttrial/response.go +++ b/typedapi/license/poststarttrial/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package poststarttrial @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package poststarttrial // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/post_start_trial/StartTrialLicenseResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/post_start_trial/StartTrialLicenseResponse.ts#L22-L29 type Response struct { Acknowledged bool `json:"acknowledged"` ErrorMessage *string `json:"error_message,omitempty"` diff --git a/typedapi/logstash/deletepipeline/delete_pipeline.go b/typedapi/logstash/deletepipeline/delete_pipeline.go index 0f1706813b..079932443a 100644 --- a/typedapi/logstash/deletepipeline/delete_pipeline.go +++ b/typedapi/logstash/deletepipeline/delete_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes Logstash Pipelines used by Central Management package deletepipeline diff --git a/typedapi/logstash/getpipeline/get_pipeline.go b/typedapi/logstash/getpipeline/get_pipeline.go index 14e35baec8..f9e24c2309 100644 --- a/typedapi/logstash/getpipeline/get_pipeline.go +++ b/typedapi/logstash/getpipeline/get_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves Logstash Pipelines used by Central Management package getpipeline diff --git a/typedapi/logstash/getpipeline/response.go b/typedapi/logstash/getpipeline/response.go index 05eaeed3f4..d2e580ba9a 100644 --- a/typedapi/logstash/getpipeline/response.go +++ b/typedapi/logstash/getpipeline/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getpipeline @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/logstash/get_pipeline/LogstashGetPipelineResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/logstash/get_pipeline/LogstashGetPipelineResponse.ts#L24-L26 type Response map[string]types.LogstashPipeline diff --git a/typedapi/logstash/putpipeline/put_pipeline.go b/typedapi/logstash/putpipeline/put_pipeline.go index 7c438086c9..b54b635f48 100644 --- a/typedapi/logstash/putpipeline/put_pipeline.go +++ b/typedapi/logstash/putpipeline/put_pipeline.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds and updates Logstash Pipelines used for Central Management package putpipeline diff --git a/typedapi/logstash/putpipeline/request.go b/typedapi/logstash/putpipeline/request.go index f3c3ab23c8..b3cf9b4266 100644 --- a/typedapi/logstash/putpipeline/request.go +++ b/typedapi/logstash/putpipeline/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putpipeline @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/logstash/put_pipeline/LogstashPutPipelineRequest.ts#L24-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/logstash/put_pipeline/LogstashPutPipelineRequest.ts#L24-L39 type Request = types.LogstashPipeline diff --git a/typedapi/migration/deprecations/deprecations.go b/typedapi/migration/deprecations/deprecations.go index 04e3302405..4fbc963be9 100644 --- a/typedapi/migration/deprecations/deprecations.go +++ b/typedapi/migration/deprecations/deprecations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about different cluster, node, and index level settings // that use deprecated features that will be removed or changed in the next diff --git a/typedapi/migration/deprecations/response.go b/typedapi/migration/deprecations/response.go index adcb23300a..ec4bac3975 100644 --- a/typedapi/migration/deprecations/response.go +++ b/typedapi/migration/deprecations/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deprecations @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deprecations // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/deprecations/DeprecationInfoResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/deprecations/DeprecationInfoResponse.ts#L23-L30 type Response struct { ClusterSettings []types.Deprecation `json:"cluster_settings"` IndexSettings map[string][]types.Deprecation `json:"index_settings"` diff --git a/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go b/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go index b75b61a6b9..fbd23f5abe 100644 --- a/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go +++ b/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Find out whether system features need to be upgraded or not package getfeatureupgradestatus diff --git a/typedapi/migration/getfeatureupgradestatus/response.go b/typedapi/migration/getfeatureupgradestatus/response.go index fe7f751543..9cc20b2d50 100644 --- a/typedapi/migration/getfeatureupgradestatus/response.go +++ b/typedapi/migration/getfeatureupgradestatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getfeatureupgradestatus @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package getfeatureupgradestatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L23-L28 type Response struct { Features []types.GetMigrationFeature `json:"features"` MigrationStatus migrationstatus.MigrationStatus `json:"migration_status"` diff --git a/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go b/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go index 82a798e47e..c51410aff8 100644 --- a/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go +++ b/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Begin upgrades for system features package postfeatureupgrade diff --git a/typedapi/migration/postfeatureupgrade/response.go b/typedapi/migration/postfeatureupgrade/response.go index c9c349dba8..7f5f057846 100644 --- a/typedapi/migration/postfeatureupgrade/response.go +++ b/typedapi/migration/postfeatureupgrade/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package postfeatureupgrade @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package postfeatureupgrade // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L20-L25 type Response struct { Accepted bool `json:"accepted"` Features []types.PostMigrationFeature `json:"features"` diff --git a/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go b/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go index 69aed45c43..8d2e696b61 100644 --- a/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go +++ b/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clear the cached results from a trained model deployment package cleartrainedmodeldeploymentcache diff --git a/typedapi/ml/cleartrainedmodeldeploymentcache/response.go b/typedapi/ml/cleartrainedmodeldeploymentcache/response.go index 4b1a0332fc..4748bad40c 100644 --- a/typedapi/ml/cleartrainedmodeldeploymentcache/response.go +++ b/typedapi/ml/cleartrainedmodeldeploymentcache/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package cleartrainedmodeldeploymentcache // Response holds the response body struct for the package cleartrainedmodeldeploymentcache // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/clear_trained_model_deployment_cache/MlClearTrainedModelDeploymentCacheResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/clear_trained_model_deployment_cache/MlClearTrainedModelDeploymentCacheResponse.ts#L20-L24 type Response struct { Cleared bool `json:"cleared"` } diff --git a/typedapi/ml/closejob/close_job.go b/typedapi/ml/closejob/close_job.go index dc0a340eae..2d4531b7e5 100644 --- a/typedapi/ml/closejob/close_job.go +++ b/typedapi/ml/closejob/close_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Closes one or more anomaly detection jobs. A job can be opened and closed // multiple times throughout its lifecycle. diff --git a/typedapi/ml/closejob/request.go b/typedapi/ml/closejob/request.go index b041500d1a..9ffac0a74f 100644 --- a/typedapi/ml/closejob/request.go +++ b/typedapi/ml/closejob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package closejob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package closejob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/close_job/MlCloseJobRequest.ts#L24-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/close_job/MlCloseJobRequest.ts#L24-L77 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. @@ -83,7 +83,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowNoMatch", err) } s.AllowNoMatch = &value case bool: @@ -97,7 +97,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Force", err) } s.Force = &value case bool: @@ -106,7 +106,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/ml/closejob/response.go b/typedapi/ml/closejob/response.go index 6322a44490..00f1301053 100644 --- a/typedapi/ml/closejob/response.go +++ b/typedapi/ml/closejob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package closejob // Response holds the response body struct for the package closejob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/close_job/MlCloseJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/close_job/MlCloseJobResponse.ts#L20-L22 type Response struct { Closed bool `json:"closed"` } diff --git a/typedapi/ml/deletecalendar/delete_calendar.go b/typedapi/ml/deletecalendar/delete_calendar.go index d0e5ae23ee..dcc5def5ce 100644 --- a/typedapi/ml/deletecalendar/delete_calendar.go +++ b/typedapi/ml/deletecalendar/delete_calendar.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a calendar. package deletecalendar diff --git a/typedapi/ml/deletecalendar/response.go b/typedapi/ml/deletecalendar/response.go index 2fd5d830bb..ec5119d5f9 100644 --- a/typedapi/ml/deletecalendar/response.go +++ b/typedapi/ml/deletecalendar/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletecalendar // Response holds the response body struct for the package deletecalendar // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_calendar/MlDeleteCalendarResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_calendar/MlDeleteCalendarResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletecalendarevent/delete_calendar_event.go b/typedapi/ml/deletecalendarevent/delete_calendar_event.go index ac4286bff8..df3d0e7e78 100644 --- a/typedapi/ml/deletecalendarevent/delete_calendar_event.go +++ b/typedapi/ml/deletecalendarevent/delete_calendar_event.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes scheduled events from a calendar. package deletecalendarevent diff --git a/typedapi/ml/deletecalendarevent/response.go b/typedapi/ml/deletecalendarevent/response.go index f5af7cde97..db074c1edf 100644 --- a/typedapi/ml/deletecalendarevent/response.go +++ b/typedapi/ml/deletecalendarevent/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletecalendarevent // Response holds the response body struct for the package deletecalendarevent // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_calendar_event/MlDeleteCalendarEventResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_calendar_event/MlDeleteCalendarEventResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletecalendarjob/delete_calendar_job.go b/typedapi/ml/deletecalendarjob/delete_calendar_job.go index 22d33d9b40..0f2d1565a6 100644 --- a/typedapi/ml/deletecalendarjob/delete_calendar_job.go +++ b/typedapi/ml/deletecalendarjob/delete_calendar_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes anomaly detection jobs from a calendar. package deletecalendarjob diff --git a/typedapi/ml/deletecalendarjob/response.go b/typedapi/ml/deletecalendarjob/response.go index 6fc7f604c2..06d8e08f27 100644 --- a/typedapi/ml/deletecalendarjob/response.go +++ b/typedapi/ml/deletecalendarjob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletecalendarjob @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Response holds the response body struct for the package deletecalendarjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_calendar_job/MlDeleteCalendarJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_calendar_job/MlDeleteCalendarJobResponse.ts#L22-L31 type Response struct { // CalendarId A string that uniquely identifies a calendar. 
@@ -63,13 +64,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "calendar_id": if err := dec.Decode(&s.CalendarId); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarId", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,13 +85,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } s.JobIds = append(s.JobIds, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.JobIds); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } } diff --git a/typedapi/ml/deletedatafeed/delete_datafeed.go b/typedapi/ml/deletedatafeed/delete_datafeed.go index e85d1706ac..67b2f250ff 100644 --- a/typedapi/ml/deletedatafeed/delete_datafeed.go +++ b/typedapi/ml/deletedatafeed/delete_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing datafeed. package deletedatafeed diff --git a/typedapi/ml/deletedatafeed/response.go b/typedapi/ml/deletedatafeed/response.go index 53c3a3cbc1..ea9fa3ba9f 100644 --- a/typedapi/ml/deletedatafeed/response.go +++ b/typedapi/ml/deletedatafeed/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletedatafeed // Response holds the response body struct for the package deletedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_datafeed/MlDeleteDatafeedResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_datafeed/MlDeleteDatafeedResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go b/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go index 903843ebcf..6613ce19f0 100644 --- a/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go +++ b/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing data frame analytics job. 
package deletedataframeanalytics diff --git a/typedapi/ml/deletedataframeanalytics/response.go b/typedapi/ml/deletedataframeanalytics/response.go index 67e580dcb2..7a5f913110 100644 --- a/typedapi/ml/deletedataframeanalytics/response.go +++ b/typedapi/ml/deletedataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletedataframeanalytics // Response holds the response body struct for the package deletedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_data_frame_analytics/MlDeleteDataFrameAnalyticsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_data_frame_analytics/MlDeleteDataFrameAnalyticsResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deleteexpireddata/delete_expired_data.go b/typedapi/ml/deleteexpireddata/delete_expired_data.go index 7e574ec8e8..244aad21ef 100644 --- a/typedapi/ml/deleteexpireddata/delete_expired_data.go +++ b/typedapi/ml/deleteexpireddata/delete_expired_data.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes expired and unused machine learning data. package deleteexpireddata diff --git a/typedapi/ml/deleteexpireddata/request.go b/typedapi/ml/deleteexpireddata/request.go index 767800c469..6afd59926f 100644 --- a/typedapi/ml/deleteexpireddata/request.go +++ b/typedapi/ml/deleteexpireddata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteexpireddata @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package deleteexpireddata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_expired_data/MlDeleteExpiredDataRequest.ts#L25-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_expired_data/MlDeleteExpiredDataRequest.ts#L25-L72 type Request struct { // RequestsPerSecond The desired requests per second for the deletion processes. 
The default @@ -82,7 +82,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequestsPerSecond", err) } f := float32(value) s.RequestsPerSecond = &f @@ -93,7 +93,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/ml/deleteexpireddata/response.go b/typedapi/ml/deleteexpireddata/response.go index 0b1ae7fbd7..16d0588da1 100644 --- a/typedapi/ml/deleteexpireddata/response.go +++ b/typedapi/ml/deleteexpireddata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteexpireddata // Response holds the response body struct for the package deleteexpireddata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_expired_data/MlDeleteExpiredDataResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_expired_data/MlDeleteExpiredDataResponse.ts#L20-L22 type Response struct { Deleted bool `json:"deleted"` } diff --git a/typedapi/ml/deletefilter/delete_filter.go b/typedapi/ml/deletefilter/delete_filter.go index 1139f9c556..fce8e96468 100644 --- a/typedapi/ml/deletefilter/delete_filter.go +++ b/typedapi/ml/deletefilter/delete_filter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a filter. package deletefilter diff --git a/typedapi/ml/deletefilter/response.go b/typedapi/ml/deletefilter/response.go index be60b97974..255776f32c 100644 --- a/typedapi/ml/deletefilter/response.go +++ b/typedapi/ml/deletefilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletefilter // Response holds the response body struct for the package deletefilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_filter/MlDeleteFilterResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_filter/MlDeleteFilterResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deleteforecast/delete_forecast.go b/typedapi/ml/deleteforecast/delete_forecast.go index e9c01e42b7..0d046aab81 100644 --- a/typedapi/ml/deleteforecast/delete_forecast.go +++ b/typedapi/ml/deleteforecast/delete_forecast.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes forecasts from a machine learning job. package deleteforecast diff --git a/typedapi/ml/deleteforecast/response.go b/typedapi/ml/deleteforecast/response.go index 85267eb2a2..b1aaa581a1 100644 --- a/typedapi/ml/deleteforecast/response.go +++ b/typedapi/ml/deleteforecast/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteforecast // Response holds the response body struct for the package deleteforecast // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_forecast/MlDeleteForecastResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_forecast/MlDeleteForecastResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletejob/delete_job.go b/typedapi/ml/deletejob/delete_job.go index 32c2f53193..4c017d253e 100644 --- a/typedapi/ml/deletejob/delete_job.go +++ b/typedapi/ml/deletejob/delete_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing anomaly detection job. package deletejob diff --git a/typedapi/ml/deletejob/response.go b/typedapi/ml/deletejob/response.go index e87e756099..89d5aad198 100644 --- a/typedapi/ml/deletejob/response.go +++ b/typedapi/ml/deletejob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletejob // Response holds the response body struct for the package deletejob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_job/MlDeleteJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_job/MlDeleteJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go b/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go index 53695f13f6..d13f97d7a6 100644 --- a/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go +++ b/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing model snapshot. package deletemodelsnapshot diff --git a/typedapi/ml/deletemodelsnapshot/response.go b/typedapi/ml/deletemodelsnapshot/response.go index 96c07a2c90..fa7a3c938f 100644 --- a/typedapi/ml/deletemodelsnapshot/response.go +++ b/typedapi/ml/deletemodelsnapshot/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletemodelsnapshot // Response holds the response body struct for the package deletemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_model_snapshot/MlDeleteModelSnapshotResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_model_snapshot/MlDeleteModelSnapshotResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletetrainedmodel/delete_trained_model.go b/typedapi/ml/deletetrainedmodel/delete_trained_model.go index 071f6dc36b..8bcefc1dd3 100644 --- a/typedapi/ml/deletetrainedmodel/delete_trained_model.go +++ b/typedapi/ml/deletetrainedmodel/delete_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing trained inference model that is currently not referenced // by an ingest pipeline. diff --git a/typedapi/ml/deletetrainedmodel/response.go b/typedapi/ml/deletetrainedmodel/response.go index 2c0785f98f..6767a8cb7d 100644 --- a/typedapi/ml/deletetrainedmodel/response.go +++ b/typedapi/ml/deletetrainedmodel/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletetrainedmodel // Response holds the response body struct for the package deletetrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_trained_model/MlDeleteTrainedModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_trained_model/MlDeleteTrainedModelResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go b/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go index f5a131712a..866963e2b8 100644 --- a/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go +++ b/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a model alias that refers to the trained model package deletetrainedmodelalias diff --git a/typedapi/ml/deletetrainedmodelalias/response.go b/typedapi/ml/deletetrainedmodelalias/response.go index 197c21b41f..cd2540b45e 100644 --- a/typedapi/ml/deletetrainedmodelalias/response.go +++ b/typedapi/ml/deletetrainedmodelalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletetrainedmodelalias // Response holds the response body struct for the package deletetrainedmodelalias // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/delete_trained_model_alias/MlDeleteTrainedModelAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/delete_trained_model_alias/MlDeleteTrainedModelAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/estimatemodelmemory/estimate_model_memory.go b/typedapi/ml/estimatemodelmemory/estimate_model_memory.go index 50d95fc03c..4be5e98caf 100644 --- a/typedapi/ml/estimatemodelmemory/estimate_model_memory.go +++ b/typedapi/ml/estimatemodelmemory/estimate_model_memory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Estimates the model memory package estimatemodelmemory diff --git a/typedapi/ml/estimatemodelmemory/request.go b/typedapi/ml/estimatemodelmemory/request.go index 81f308a454..05ec14239e 100644 --- a/typedapi/ml/estimatemodelmemory/request.go +++ b/typedapi/ml/estimatemodelmemory/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package estimatemodelmemory @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package estimatemodelmemory // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/estimate_model_memory/MlEstimateModelMemoryRequest.ts#L26-L61 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/estimate_model_memory/MlEstimateModelMemoryRequest.ts#L26-L61 type Request struct { // AnalysisConfig For a list of the properties that you can specify in the diff --git a/typedapi/ml/estimatemodelmemory/response.go b/typedapi/ml/estimatemodelmemory/response.go index 9a76b06876..6d60650eae 100644 --- a/typedapi/ml/estimatemodelmemory/response.go +++ b/typedapi/ml/estimatemodelmemory/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package estimatemodelmemory // Response holds the response body struct for the package estimatemodelmemory // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/estimate_model_memory/MlEstimateModelMemoryResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/estimate_model_memory/MlEstimateModelMemoryResponse.ts#L20-L24 type Response struct { ModelMemoryEstimate string `json:"model_memory_estimate"` } diff --git a/typedapi/ml/evaluatedataframe/evaluate_data_frame.go b/typedapi/ml/evaluatedataframe/evaluate_data_frame.go index be3c79c401..22f86968e9 100644 --- a/typedapi/ml/evaluatedataframe/evaluate_data_frame.go +++ b/typedapi/ml/evaluatedataframe/evaluate_data_frame.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Evaluates the data frame analytics for an annotated index. package evaluatedataframe diff --git a/typedapi/ml/evaluatedataframe/request.go b/typedapi/ml/evaluatedataframe/request.go index 29eb5c6a28..11e0fb6a34 100644 --- a/typedapi/ml/evaluatedataframe/request.go +++ b/typedapi/ml/evaluatedataframe/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package evaluatedataframe @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package evaluatedataframe // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/MlEvaluateDataFrameRequest.ts#L25-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/MlEvaluateDataFrameRequest.ts#L25-L52 type Request struct { // Evaluation Defines the type of evaluation you want to perform. @@ -77,17 +77,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "evaluation": if err := dec.Decode(&s.Evaluation); err != nil { - return err + return fmt.Errorf("%s | %w", "Evaluation", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } } diff --git a/typedapi/ml/evaluatedataframe/response.go b/typedapi/ml/evaluatedataframe/response.go index 6a194d8469..4a86dd3f60 100644 --- a/typedapi/ml/evaluatedataframe/response.go +++ b/typedapi/ml/evaluatedataframe/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package evaluatedataframe @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package evaluatedataframe // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/MlEvaluateDataFrameResponse.ts#L26-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/MlEvaluateDataFrameResponse.ts#L26-L33 type Response struct { Classification *types.DataframeClassificationSummary `json:"classification,omitempty"` OutlierDetection *types.DataframeOutlierDetectionSummary `json:"outlier_detection,omitempty"` diff --git a/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go b/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go index 8a284efd79..67b8466146 100644 --- a/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go +++ b/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Explains a data frame analytics config. package explaindataframeanalytics diff --git a/typedapi/ml/explaindataframeanalytics/request.go b/typedapi/ml/explaindataframeanalytics/request.go index 7bad1d6ca1..e7f5f9deb3 100644 --- a/typedapi/ml/explaindataframeanalytics/request.go +++ b/typedapi/ml/explaindataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explaindataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explaindataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsRequest.ts#L30-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsRequest.ts#L30-L107 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine diff --git a/typedapi/ml/explaindataframeanalytics/response.go b/typedapi/ml/explaindataframeanalytics/response.go index f325f50e2f..f8d97902f8 100644 --- a/typedapi/ml/explaindataframeanalytics/response.go +++ b/typedapi/ml/explaindataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package explaindataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explaindataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsResponse.ts#L25-L32 type Response struct { // FieldSelection An array of objects that explain selection for each field, sorted by the diff --git a/typedapi/ml/flushjob/flush_job.go b/typedapi/ml/flushjob/flush_job.go index a2c65ed45e..6f061be3a4 100644 --- a/typedapi/ml/flushjob/flush_job.go +++ b/typedapi/ml/flushjob/flush_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Forces any buffered data to be processed by the job. package flushjob diff --git a/typedapi/ml/flushjob/request.go b/typedapi/ml/flushjob/request.go index d112f9829e..97fd578b69 100644 --- a/typedapi/ml/flushjob/request.go +++ b/typedapi/ml/flushjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package flushjob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package flushjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/flush_job/MlFlushJobRequest.ts#L24-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/flush_job/MlFlushJobRequest.ts#L24-L99 type Request struct { // AdvanceTime Refer to the description for the `advance_time` query parameter. @@ -82,7 +82,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "advance_time": if err := dec.Decode(&s.AdvanceTime); err != nil { - return err + return fmt.Errorf("%s | %w", "AdvanceTime", err) } case "calc_interim": @@ -92,7 +92,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CalcInterim", err) } s.CalcInterim = &value case bool: @@ -101,17 +101,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "skip_time": if err := dec.Decode(&s.SkipTime); err != nil { - return err + return fmt.Errorf("%s | %w", "SkipTime", err) } case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } } diff --git a/typedapi/ml/flushjob/response.go b/typedapi/ml/flushjob/response.go index 8e12b6132e..c1d175c84a 100644 --- a/typedapi/ml/flushjob/response.go +++ b/typedapi/ml/flushjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package flushjob // Response holds the response body struct for the package flushjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/flush_job/MlFlushJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/flush_job/MlFlushJobResponse.ts#L22-L31 type Response struct { Flushed bool `json:"flushed"` // LastFinalizedBucketEnd Provides the timestamp (in milliseconds since the epoch) of the end of diff --git a/typedapi/ml/forecast/forecast.go b/typedapi/ml/forecast/forecast.go index 2256f8b237..0ae9f56daa 100644 --- a/typedapi/ml/forecast/forecast.go +++ b/typedapi/ml/forecast/forecast.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Predicts the future behavior of a time series by using its historical // behavior. 
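The recurring change in these UnmarshalJSON hunks is that bare "return err" statements become "return fmt.Errorf("%s | %w", "<FieldName>", err)", so a decode failure names the Go struct field that was being read while the %w verb keeps the underlying error reachable through errors.Is and errors.As. The sketch below is a minimal, self-contained illustration of that convention only, not the generated client code: the Response type, its single flushed field, and main are invented for the example.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

// Response mimics the generated style: a field that may arrive as a JSON bool
// or as a string, with decode errors wrapped as "<FieldName> | <cause>".
type Response struct {
	Flushed bool `json:"flushed"`
}

func (s *Response) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if v, ok := raw["flushed"]; ok {
		// Accept either a quoted string such as "true" or a plain JSON bool.
		var asString string
		if err := json.Unmarshal(v, &asString); err == nil {
			parsed, err := strconv.ParseBool(asString)
			if err != nil {
				// Same wrapping pattern as the generated code in this diff.
				return fmt.Errorf("%s | %w", "Flushed", err)
			}
			s.Flushed = parsed
			return nil
		}
		if err := json.Unmarshal(v, &s.Flushed); err != nil {
			return fmt.Errorf("%s | %w", "Flushed", err)
		}
	}
	return nil
}

func main() {
	var r Response
	err := r.UnmarshalJSON([]byte(`{"flushed":"not-a-bool"}`))
	fmt.Println(err)                               // Flushed | strconv.ParseBool: parsing "not-a-bool": invalid syntax
	fmt.Println(errors.Is(err, strconv.ErrSyntax)) // true: %w keeps the original cause unwrappable
}

In other words, callers that previously matched on the raw strconv or json error keep working, and the field name prefix is purely additive context.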
diff --git a/typedapi/ml/forecast/request.go b/typedapi/ml/forecast/request.go index 40bf85f2e3..5607464a39 100644 --- a/typedapi/ml/forecast/request.go +++ b/typedapi/ml/forecast/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package forecast @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package forecast // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/forecast/MlForecastJobRequest.ts#L24-L87 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/forecast/MlForecastJobRequest.ts#L24-L87 type Request struct { // Duration Refer to the description for the `duration` query parameter. @@ -78,18 +78,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "duration": if err := dec.Decode(&s.Duration); err != nil { - return err + return fmt.Errorf("%s | %w", "Duration", err) } case "expires_in": if err := dec.Decode(&s.ExpiresIn); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpiresIn", err) } case "max_model_memory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxModelMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/ml/forecast/response.go b/typedapi/ml/forecast/response.go index e5acedd79c..f81873cb2c 100644 --- a/typedapi/ml/forecast/response.go +++ b/typedapi/ml/forecast/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package forecast // Response holds the response body struct for the package forecast // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/forecast/MlForecastJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/forecast/MlForecastJobResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` ForecastId string `json:"forecast_id"` diff --git a/typedapi/ml/getbuckets/get_buckets.go b/typedapi/ml/getbuckets/get_buckets.go index e4a367a2c7..b7a87388d9 100644 --- a/typedapi/ml/getbuckets/get_buckets.go +++ b/typedapi/ml/getbuckets/get_buckets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves anomaly detection job results for one or more buckets. package getbuckets diff --git a/typedapi/ml/getbuckets/request.go b/typedapi/ml/getbuckets/request.go index 5625c0c555..036b63fb02 100644 --- a/typedapi/ml/getbuckets/request.go +++ b/typedapi/ml/getbuckets/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getbuckets @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_buckets/MlGetBucketsRequest.ts#L26-L133 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_buckets/MlGetBucketsRequest.ts#L26-L133 type Request struct { // AnomalyScore Refer to the description for the `anomaly_score` query parameter. @@ -92,7 +92,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyScore", err) } f := types.Float64(value) s.AnomalyScore = &f @@ -108,7 +108,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Desc", err) } s.Desc = &value case bool: @@ -117,7 +117,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "exclude_interim": @@ -127,7 +127,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExcludeInterim", err) } s.ExcludeInterim = &value case bool: @@ -141,7 +141,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Expand", err) } s.Expand = &value case bool: @@ -150,17 +150,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "page": if err := dec.Decode(&s.Page); err != nil { - return err + return fmt.Errorf("%s | %w", "Page", err) } case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } } diff --git a/typedapi/ml/getbuckets/response.go b/typedapi/ml/getbuckets/response.go index d63200274c..ef74830bac 100644 --- a/typedapi/ml/getbuckets/response.go +++ b/typedapi/ml/getbuckets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getbuckets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_buckets/MlGetBucketsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_buckets/MlGetBucketsResponse.ts#L23-L28 type Response struct { Buckets []types.BucketSummary `json:"buckets"` Count int64 `json:"count"` diff --git a/typedapi/ml/getcalendarevents/get_calendar_events.go b/typedapi/ml/getcalendarevents/get_calendar_events.go index 13e86e08e8..b55405e5ad 100644 --- a/typedapi/ml/getcalendarevents/get_calendar_events.go +++ b/typedapi/ml/getcalendarevents/get_calendar_events.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the scheduled events in calendars. package getcalendarevents diff --git a/typedapi/ml/getcalendarevents/response.go b/typedapi/ml/getcalendarevents/response.go index e50ec79b26..e09d40c3f2 100644 --- a/typedapi/ml/getcalendarevents/response.go +++ b/typedapi/ml/getcalendarevents/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getcalendarevents @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_calendar_events/MlGetCalendarEventsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_calendar_events/MlGetCalendarEventsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Events []types.CalendarEvent `json:"events"` diff --git a/typedapi/ml/getcalendars/get_calendars.go b/typedapi/ml/getcalendars/get_calendars.go index 5ddd300b66..832dea4705 100644 --- a/typedapi/ml/getcalendars/get_calendars.go +++ b/typedapi/ml/getcalendars/get_calendars.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves configuration information for calendars. package getcalendars diff --git a/typedapi/ml/getcalendars/request.go b/typedapi/ml/getcalendars/request.go index b0ec0ded21..69f0f8fa5e 100644 --- a/typedapi/ml/getcalendars/request.go +++ b/typedapi/ml/getcalendars/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getcalendars @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getcalendars // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_calendars/MlGetCalendarsRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_calendars/MlGetCalendarsRequest.ts#L25-L51 type Request struct { // Page This object is supported only when you omit the calendar identifier. diff --git a/typedapi/ml/getcalendars/response.go b/typedapi/ml/getcalendars/response.go index bbe40a6c1e..a322bd3983 100644 --- a/typedapi/ml/getcalendars/response.go +++ b/typedapi/ml/getcalendars/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getcalendars @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcalendars // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_calendars/MlGetCalendarsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_calendars/MlGetCalendarsResponse.ts#L23-L25 type Response struct { Calendars []types.Calendar `json:"calendars"` Count int64 `json:"count"` diff --git a/typedapi/ml/getcategories/get_categories.go b/typedapi/ml/getcategories/get_categories.go index 7620f3f3b0..1fe826406c 100644 --- a/typedapi/ml/getcategories/get_categories.go +++ b/typedapi/ml/getcategories/get_categories.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves anomaly detection job results for one or more categories. package getcategories diff --git a/typedapi/ml/getcategories/request.go b/typedapi/ml/getcategories/request.go index d786ae5d01..cb984f40b4 100644 --- a/typedapi/ml/getcategories/request.go +++ b/typedapi/ml/getcategories/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getcategories @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getcategories // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_categories/MlGetCategoriesRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_categories/MlGetCategoriesRequest.ts#L25-L70 type Request struct { // Page Configures pagination. 
diff --git a/typedapi/ml/getcategories/response.go b/typedapi/ml/getcategories/response.go index 3a67c1351b..811e263a0f 100644 --- a/typedapi/ml/getcategories/response.go +++ b/typedapi/ml/getcategories/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getcategories @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcategories // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_categories/MlGetCategoriesResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_categories/MlGetCategoriesResponse.ts#L23-L28 type Response struct { Categories []types.Category `json:"categories"` Count int64 `json:"count"` diff --git a/typedapi/ml/getdatafeeds/get_datafeeds.go b/typedapi/ml/getdatafeeds/get_datafeeds.go index 38774ac18b..77140b1133 100644 --- a/typedapi/ml/getdatafeeds/get_datafeeds.go +++ b/typedapi/ml/getdatafeeds/get_datafeeds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves configuration information for datafeeds. package getdatafeeds diff --git a/typedapi/ml/getdatafeeds/response.go b/typedapi/ml/getdatafeeds/response.go index f047f89bd5..6ae643eb18 100644 --- a/typedapi/ml/getdatafeeds/response.go +++ b/typedapi/ml/getdatafeeds/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getdatafeeds @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatafeeds // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_datafeeds/MlGetDatafeedsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_datafeeds/MlGetDatafeedsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Datafeeds []types.MLDatafeed `json:"datafeeds"` diff --git a/typedapi/ml/getdatafeedstats/get_datafeed_stats.go b/typedapi/ml/getdatafeedstats/get_datafeed_stats.go index 579495521f..efa592d31a 100644 --- a/typedapi/ml/getdatafeedstats/get_datafeed_stats.go +++ b/typedapi/ml/getdatafeedstats/get_datafeed_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves usage information for datafeeds. 
package getdatafeedstats diff --git a/typedapi/ml/getdatafeedstats/response.go b/typedapi/ml/getdatafeedstats/response.go index 55b0b832eb..0bc17114ab 100644 --- a/typedapi/ml/getdatafeedstats/response.go +++ b/typedapi/ml/getdatafeedstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getdatafeedstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatafeedstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_datafeed_stats/MlGetDatafeedStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_datafeed_stats/MlGetDatafeedStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Datafeeds []types.DatafeedStats `json:"datafeeds"` diff --git a/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go b/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go index 42b09826c6..ccccb42dba 100644 --- a/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go +++ b/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves configuration information for data frame analytics jobs. package getdataframeanalytics diff --git a/typedapi/ml/getdataframeanalytics/response.go b/typedapi/ml/getdataframeanalytics/response.go index b7f8f0d4f4..ebaaa4e343 100644 --- a/typedapi/ml/getdataframeanalytics/response.go +++ b/typedapi/ml/getdataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getdataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_data_frame_analytics/MlGetDataFrameAnalyticsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_data_frame_analytics/MlGetDataFrameAnalyticsResponse.ts#L23-L29 type Response struct { Count int `json:"count"` // DataFrameAnalytics An array of data frame analytics job resources, which are sorted by the id diff --git a/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go b/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go index 52654869ab..e88d4bc536 100644 --- a/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go +++ b/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves usage information for data frame analytics jobs. package getdataframeanalyticsstats diff --git a/typedapi/ml/getdataframeanalyticsstats/response.go b/typedapi/ml/getdataframeanalyticsstats/response.go index d53fffd859..51864fbb9e 100644 --- a/typedapi/ml/getdataframeanalyticsstats/response.go +++ b/typedapi/ml/getdataframeanalyticsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getdataframeanalyticsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdataframeanalyticsstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_data_frame_analytics_stats/MlGetDataFrameAnalyticsStatsResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_data_frame_analytics_stats/MlGetDataFrameAnalyticsStatsResponse.ts#L24-L30 type Response struct { Count int64 `json:"count"` // DataFrameAnalytics An array of objects that contain usage information for data frame analytics diff --git a/typedapi/ml/getfilters/get_filters.go b/typedapi/ml/getfilters/get_filters.go index 6f293bf98c..11735cfd76 100644 --- a/typedapi/ml/getfilters/get_filters.go +++ b/typedapi/ml/getfilters/get_filters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves filters. package getfilters diff --git a/typedapi/ml/getfilters/response.go b/typedapi/ml/getfilters/response.go index 48174c3fa4..34f588185e 100644 --- a/typedapi/ml/getfilters/response.go +++ b/typedapi/ml/getfilters/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getfilters @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfilters // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_filters/MlGetFiltersResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_filters/MlGetFiltersResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Filters []types.MLFilter `json:"filters"` diff --git a/typedapi/ml/getinfluencers/get_influencers.go b/typedapi/ml/getinfluencers/get_influencers.go index e32e92fa23..e3af43c4ad 100644 --- a/typedapi/ml/getinfluencers/get_influencers.go +++ b/typedapi/ml/getinfluencers/get_influencers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves anomaly detection job results for one or more influencers. package getinfluencers diff --git a/typedapi/ml/getinfluencers/request.go b/typedapi/ml/getinfluencers/request.go index a60ce0fd5c..8090b6a1c8 100644 --- a/typedapi/ml/getinfluencers/request.go +++ b/typedapi/ml/getinfluencers/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getinfluencers @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getinfluencers // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_influencers/MlGetInfluencersRequest.ts#L26-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_influencers/MlGetInfluencersRequest.ts#L26-L97 type Request struct { // Page Configures pagination. diff --git a/typedapi/ml/getinfluencers/response.go b/typedapi/ml/getinfluencers/response.go index 8d2357eac8..54a56ef92a 100644 --- a/typedapi/ml/getinfluencers/response.go +++ b/typedapi/ml/getinfluencers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getinfluencers @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getinfluencers // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_influencers/MlGetInfluencersResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_influencers/MlGetInfluencersResponse.ts#L23-L29 type Response struct { Count int64 `json:"count"` // Influencers Array of influencer objects diff --git a/typedapi/ml/getjobs/get_jobs.go b/typedapi/ml/getjobs/get_jobs.go index 2aa826dca8..0dfa00d12b 100644 --- a/typedapi/ml/getjobs/get_jobs.go +++ b/typedapi/ml/getjobs/get_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves configuration information for anomaly detection jobs. package getjobs diff --git a/typedapi/ml/getjobs/response.go b/typedapi/ml/getjobs/response.go index 8a05468796..d2f16c0aa9 100644 --- a/typedapi/ml/getjobs/response.go +++ b/typedapi/ml/getjobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getjobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobs // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_jobs/MlGetJobsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_jobs/MlGetJobsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Jobs []types.Job `json:"jobs"` diff --git a/typedapi/ml/getjobstats/get_job_stats.go b/typedapi/ml/getjobstats/get_job_stats.go index a0d37bf731..f8c0c79ed4 100644 --- a/typedapi/ml/getjobstats/get_job_stats.go +++ b/typedapi/ml/getjobstats/get_job_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves usage information for anomaly detection jobs. package getjobstats diff --git a/typedapi/ml/getjobstats/response.go b/typedapi/ml/getjobstats/response.go index a9008dd11b..ddf6192015 100644 --- a/typedapi/ml/getjobstats/response.go +++ b/typedapi/ml/getjobstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getjobstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_job_stats/MlGetJobStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_job_stats/MlGetJobStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Jobs []types.JobStats `json:"jobs"` diff --git a/typedapi/ml/getmemorystats/get_memory_stats.go b/typedapi/ml/getmemorystats/get_memory_stats.go index e74006af1e..039a870fee 100644 --- a/typedapi/ml/getmemorystats/get_memory_stats.go +++ b/typedapi/ml/getmemorystats/get_memory_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information on how ML is using memory. package getmemorystats diff --git a/typedapi/ml/getmemorystats/response.go b/typedapi/ml/getmemorystats/response.go index 00ad37c061..aa8d87bd7e 100644 --- a/typedapi/ml/getmemorystats/response.go +++ b/typedapi/ml/getmemorystats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getmemorystats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmemorystats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_memory_stats/MlGetMemoryStatsResponse.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_memory_stats/MlGetMemoryStatsResponse.ts#L25-L31 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/ml/getmodelsnapshots/get_model_snapshots.go b/typedapi/ml/getmodelsnapshots/get_model_snapshots.go index 35eeee71c8..d217694a3a 100644 --- a/typedapi/ml/getmodelsnapshots/get_model_snapshots.go +++ b/typedapi/ml/getmodelsnapshots/get_model_snapshots.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about model snapshots. package getmodelsnapshots diff --git a/typedapi/ml/getmodelsnapshots/request.go b/typedapi/ml/getmodelsnapshots/request.go index 300d15d1b2..23fd17132a 100644 --- a/typedapi/ml/getmodelsnapshots/request.go +++ b/typedapi/ml/getmodelsnapshots/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getmodelsnapshots @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getmodelsnapshots // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_model_snapshots/MlGetModelSnapshotsRequest.ts#L26-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_model_snapshots/MlGetModelSnapshotsRequest.ts#L26-L96 type Request struct { // Desc Refer to the description for the `desc` query parameter. @@ -86,7 +86,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Desc", err) } s.Desc = &value case bool: @@ -95,22 +95,22 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "page": if err := dec.Decode(&s.Page); err != nil { - return err + return fmt.Errorf("%s | %w", "Page", err) } case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } } diff --git a/typedapi/ml/getmodelsnapshots/response.go b/typedapi/ml/getmodelsnapshots/response.go index 477c10e854..81d35fa47e 100644 --- a/typedapi/ml/getmodelsnapshots/response.go +++ b/typedapi/ml/getmodelsnapshots/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getmodelsnapshots @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodelsnapshots // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_model_snapshots/MlGetModelSnapshotsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_model_snapshots/MlGetModelSnapshotsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` ModelSnapshots []types.ModelSnapshot `json:"model_snapshots"` diff --git a/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go b/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go index a2c95163a2..d949e01572 100644 --- a/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go +++ b/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Gets stats for anomaly detection job model snapshot upgrades that are in // progress. 
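
The getmodelsnapshots hunk above keeps the generated decoders' tolerance for fields such as desc arriving either as a JSON bool or as a quoted string, now with field-labelled errors. A minimal sketch of that token-driven pattern, assuming a hypothetical Doc type; standard library only, not the generated code itself.

package example

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// Doc mirrors the string-or-bool tolerance seen in the generated decoders.
type Doc struct {
	Desc *bool `json:"desc,omitempty"`
}

func (s *Doc) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "desc":
			var raw interface{}
			if err := dec.Decode(&raw); err != nil {
				return fmt.Errorf("%s | %w", "Desc", err)
			}
			switch v := raw.(type) {
			case string:
				value, err := strconv.ParseBool(v)
				if err != nil {
					return fmt.Errorf("%s | %w", "Desc", err)
				}
				s.Desc = &value
			case bool:
				s.Desc = &v
			}
		}
	}
	return nil
}
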
diff --git a/typedapi/ml/getmodelsnapshotupgradestats/response.go b/typedapi/ml/getmodelsnapshotupgradestats/response.go index e9fcaaae07..d74524b1d0 100644 --- a/typedapi/ml/getmodelsnapshotupgradestats/response.go +++ b/typedapi/ml/getmodelsnapshotupgradestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getmodelsnapshotupgradestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodelsnapshotupgradestats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_model_snapshot_upgrade_stats/MlGetModelSnapshotUpgradeStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_model_snapshot_upgrade_stats/MlGetModelSnapshotUpgradeStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` ModelSnapshotUpgrades []types.ModelSnapshotUpgrade `json:"model_snapshot_upgrades"` diff --git a/typedapi/ml/getoverallbuckets/get_overall_buckets.go b/typedapi/ml/getoverallbuckets/get_overall_buckets.go index c07bc41b33..75e34609e7 100644 --- a/typedapi/ml/getoverallbuckets/get_overall_buckets.go +++ b/typedapi/ml/getoverallbuckets/get_overall_buckets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves overall bucket results that summarize the bucket results of // multiple anomaly detection jobs. diff --git a/typedapi/ml/getoverallbuckets/request.go b/typedapi/ml/getoverallbuckets/request.go index 963c468fb4..35c424522e 100644 --- a/typedapi/ml/getoverallbuckets/request.go +++ b/typedapi/ml/getoverallbuckets/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getoverallbuckets @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getoverallbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_overall_buckets/MlGetOverallBucketsRequest.ts#L25-L143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_overall_buckets/MlGetOverallBucketsRequest.ts#L25-L143 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. 
@@ -91,7 +91,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowNoMatch", err) } s.AllowNoMatch = &value case bool: @@ -100,12 +100,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "exclude_interim": @@ -115,7 +115,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExcludeInterim", err) } s.ExcludeInterim = &value case bool: @@ -125,7 +125,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "overall_score": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "OverallScore", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -136,7 +136,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } case "top_n": @@ -147,7 +147,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TopN", err) } s.TopN = &value case float64: diff --git a/typedapi/ml/getoverallbuckets/response.go b/typedapi/ml/getoverallbuckets/response.go index 704c1c3de4..65f53b4d61 100644 --- a/typedapi/ml/getoverallbuckets/response.go +++ b/typedapi/ml/getoverallbuckets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getoverallbuckets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getoverallbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_overall_buckets/MlGetOverallBucketsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_overall_buckets/MlGetOverallBucketsResponse.ts#L23-L29 type Response struct { Count int64 `json:"count"` // OverallBuckets Array of overall bucket objects diff --git a/typedapi/ml/getrecords/get_records.go b/typedapi/ml/getrecords/get_records.go index 9abcde218f..c3fafc05f6 100644 --- a/typedapi/ml/getrecords/get_records.go +++ b/typedapi/ml/getrecords/get_records.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves anomaly records for an anomaly detection job. package getrecords diff --git a/typedapi/ml/getrecords/request.go b/typedapi/ml/getrecords/request.go index ad12ac5664..ab745b3fb2 100644 --- a/typedapi/ml/getrecords/request.go +++ b/typedapi/ml/getrecords/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrecords @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getrecords // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_records/MlGetAnomalyRecordsRequest.ts#L26-L127 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_records/MlGetAnomalyRecordsRequest.ts#L26-L127 type Request struct { // Desc Refer to the description for the `desc` query parameter. @@ -90,7 +90,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Desc", err) } s.Desc = &value case bool: @@ -99,7 +99,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "exclude_interim": @@ -109,7 +109,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExcludeInterim", err) } s.ExcludeInterim = &value case bool: @@ -118,7 +118,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "page": if err := dec.Decode(&s.Page); err != nil { - return err + return fmt.Errorf("%s | %w", "Page", err) } case "record_score": @@ -128,7 +128,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RecordScore", err) } f := types.Float64(value) s.RecordScore = &f @@ -139,12 +139,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } } diff --git a/typedapi/ml/getrecords/response.go b/typedapi/ml/getrecords/response.go index 5f3f8c4156..5461e66421 100644 --- a/typedapi/ml/getrecords/response.go +++ b/typedapi/ml/getrecords/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrecords @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrecords // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_records/MlGetAnomalyRecordsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_records/MlGetAnomalyRecordsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Records []types.Anomaly `json:"records"` diff --git a/typedapi/ml/gettrainedmodels/get_trained_models.go b/typedapi/ml/gettrainedmodels/get_trained_models.go index 0a905228b1..cfc7717c18 100644 --- a/typedapi/ml/gettrainedmodels/get_trained_models.go +++ b/typedapi/ml/gettrainedmodels/get_trained_models.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves configuration information for a trained inference model. package gettrainedmodels diff --git a/typedapi/ml/gettrainedmodels/response.go b/typedapi/ml/gettrainedmodels/response.go index 0a8c92e417..98a9ffc088 100644 --- a/typedapi/ml/gettrainedmodels/response.go +++ b/typedapi/ml/gettrainedmodels/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettrainedmodels @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettrainedmodels // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_trained_models/MlGetTrainedModelResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_trained_models/MlGetTrainedModelResponse.ts#L23-L34 type Response struct { Count int `json:"count"` // TrainedModelConfigs An array of trained model resources, which are sorted by the model_id value diff --git a/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go b/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go index 918802ea06..ad77ec6535 100644 --- a/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go +++ b/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves usage information for trained inference models. 
package gettrainedmodelsstats diff --git a/typedapi/ml/gettrainedmodelsstats/response.go b/typedapi/ml/gettrainedmodelsstats/response.go index 8d6a4037ca..a03754de92 100644 --- a/typedapi/ml/gettrainedmodelsstats/response.go +++ b/typedapi/ml/gettrainedmodelsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettrainedmodelsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettrainedmodelsstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_trained_models_stats/MlGetTrainedModelStatsResponse.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_trained_models_stats/MlGetTrainedModelStatsResponse.ts#L23-L33 type Response struct { // Count The total number of trained model statistics that matched the requested ID diff --git a/typedapi/ml/infertrainedmodel/infer_trained_model.go b/typedapi/ml/infertrainedmodel/infer_trained_model.go index 34cd0fe4e3..c4f00932c9 100644 --- a/typedapi/ml/infertrainedmodel/infer_trained_model.go +++ b/typedapi/ml/infertrainedmodel/infer_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Evaluate a trained model. package infertrainedmodel diff --git a/typedapi/ml/infertrainedmodel/request.go b/typedapi/ml/infertrainedmodel/request.go index f5ee550c60..e7e3b1bb9d 100644 --- a/typedapi/ml/infertrainedmodel/request.go +++ b/typedapi/ml/infertrainedmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package infertrainedmodel @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package infertrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/infer_trained_model/MlInferTrainedModelRequest.ts#L27-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/infer_trained_model/MlInferTrainedModelRequest.ts#L27-L59 type Request struct { // Docs An array of objects to pass to the model for inference. The objects should diff --git a/typedapi/ml/infertrainedmodel/response.go b/typedapi/ml/infertrainedmodel/response.go index 6d20da834e..5ff3ffc279 100644 --- a/typedapi/ml/infertrainedmodel/response.go +++ b/typedapi/ml/infertrainedmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package infertrainedmodel @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package infertrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/infer_trained_model/MlInferTrainedModelResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/infer_trained_model/MlInferTrainedModelResponse.ts#L22-L26 type Response struct { InferenceResults []types.InferenceResponseResult `json:"inference_results"` } diff --git a/typedapi/ml/info/info.go b/typedapi/ml/info/info.go index 7530dc4e2a..f7e6985451 100644 --- a/typedapi/ml/info/info.go +++ b/typedapi/ml/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns defaults and limits used by machine learning. package info diff --git a/typedapi/ml/info/response.go b/typedapi/ml/info/response.go index 352c2ac79e..3e4feacf08 100644 --- a/typedapi/ml/info/response.go +++ b/typedapi/ml/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/info/MlInfoResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/info/MlInfoResponse.ts#L22-L29 type Response struct { Defaults types.Defaults `json:"defaults"` Limits types.Limits `json:"limits"` diff --git a/typedapi/ml/openjob/open_job.go b/typedapi/ml/openjob/open_job.go index 3d2eff9d04..8ea9b1abc6 100644 --- a/typedapi/ml/openjob/open_job.go +++ b/typedapi/ml/openjob/open_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Opens one or more anomaly detection jobs. package openjob diff --git a/typedapi/ml/openjob/request.go b/typedapi/ml/openjob/request.go index 0e17228311..504d027ecd 100644 --- a/typedapi/ml/openjob/request.go +++ b/typedapi/ml/openjob/request.go @@ -16,23 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package openjob import ( - "bytes" "encoding/json" - "errors" "fmt" - "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Request holds the request body struct for the package openjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/open_job/MlOpenJobRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/open_job/MlOpenJobRequest.ts#L24-L59 type Request struct { // Timeout Refer to the description for the `timeout` query parameter. @@ -56,27 +53,3 @@ func (r *Request) FromJSON(data string) (*Request, error) { return &req, nil } - -func (s *Request) UnmarshalJSON(data []byte) error { - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "timeout": - if err := dec.Decode(&s.Timeout); err != nil { - return err - } - - } - } - return nil -} diff --git a/typedapi/ml/openjob/response.go b/typedapi/ml/openjob/response.go index d94e6f1951..d596a7ad52 100644 --- a/typedapi/ml/openjob/response.go +++ b/typedapi/ml/openjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package openjob // Response holds the response body struct for the package openjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/open_job/MlOpenJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/open_job/MlOpenJobResponse.ts#L22-L31 type Response struct { // Node The ID of the node that the job was started on. In serverless this will be diff --git a/typedapi/ml/postcalendarevents/post_calendar_events.go b/typedapi/ml/postcalendarevents/post_calendar_events.go index ce8959cc33..a875b928c8 100644 --- a/typedapi/ml/postcalendarevents/post_calendar_events.go +++ b/typedapi/ml/postcalendarevents/post_calendar_events.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Posts scheduled events in a calendar. package postcalendarevents diff --git a/typedapi/ml/postcalendarevents/request.go b/typedapi/ml/postcalendarevents/request.go index 1a0e5f8e8a..98b9a57a92 100644 --- a/typedapi/ml/postcalendarevents/request.go +++ b/typedapi/ml/postcalendarevents/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package postcalendarevents @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package postcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/post_calendar_events/MlPostCalendarEventsRequest.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/post_calendar_events/MlPostCalendarEventsRequest.ts#L24-L40 type Request struct { // Events A list of one of more scheduled events. The event’s start and end times can diff --git a/typedapi/ml/postcalendarevents/response.go b/typedapi/ml/postcalendarevents/response.go index 68297272bf..ea437779da 100644 --- a/typedapi/ml/postcalendarevents/response.go +++ b/typedapi/ml/postcalendarevents/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package postcalendarevents @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package postcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/post_calendar_events/MlPostCalendarEventsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/post_calendar_events/MlPostCalendarEventsResponse.ts#L22-L24 type Response struct { Events []types.CalendarEvent `json:"events"` } diff --git a/typedapi/ml/postdata/post_data.go b/typedapi/ml/postdata/post_data.go index 55386415a8..a09a16f18d 100644 --- a/typedapi/ml/postdata/post_data.go +++ b/typedapi/ml/postdata/post_data.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Sends data to an anomaly detection job for analysis. package postdata diff --git a/typedapi/ml/postdata/request.go b/typedapi/ml/postdata/request.go index 4dfbfd51ab..1d12b308ad 100644 --- a/typedapi/ml/postdata/request.go +++ b/typedapi/ml/postdata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package postdata @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package postdata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/post_data/MlPostJobDataRequest.ts#L24-L68 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/post_data/MlPostJobDataRequest.ts#L24-L68 type Request = []json.RawMessage diff --git a/typedapi/ml/postdata/response.go b/typedapi/ml/postdata/response.go index f3b1846cb2..fd6d128a9c 100644 --- a/typedapi/ml/postdata/response.go +++ b/typedapi/ml/postdata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package postdata // Response holds the response body struct for the package postdata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/post_data/MlPostJobDataResponse.ts#L23-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/post_data/MlPostJobDataResponse.ts#L23-L41 type Response struct { BucketCount int64 `json:"bucket_count"` EarliestRecordTimestamp int64 `json:"earliest_record_timestamp"` diff --git a/typedapi/ml/previewdatafeed/preview_datafeed.go b/typedapi/ml/previewdatafeed/preview_datafeed.go index 9cde031b8b..a1f7726fbb 100644 --- a/typedapi/ml/previewdatafeed/preview_datafeed.go +++ b/typedapi/ml/previewdatafeed/preview_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Previews a datafeed. package previewdatafeed diff --git a/typedapi/ml/previewdatafeed/request.go b/typedapi/ml/previewdatafeed/request.go index afabbd8ceb..25ece72dd0 100644 --- a/typedapi/ml/previewdatafeed/request.go +++ b/typedapi/ml/previewdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package previewdatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/preview_datafeed/MlPreviewDatafeedRequest.ts#L26-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/preview_datafeed/MlPreviewDatafeedRequest.ts#L26-L69 type Request struct { // DatafeedConfig The datafeed definition to preview. 
diff --git a/typedapi/ml/previewdatafeed/response.go b/typedapi/ml/previewdatafeed/response.go index a759dbba55..f26f68e558 100644 --- a/typedapi/ml/previewdatafeed/response.go +++ b/typedapi/ml/previewdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package previewdatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package previewdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/preview_datafeed/MlPreviewDatafeedResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/preview_datafeed/MlPreviewDatafeedResponse.ts#L20-L22 type Response []json.RawMessage diff --git a/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go b/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go index 743e731eff..3cc589af23 100644 --- a/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go +++ b/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Previews that will be analyzed given a data frame analytics config. package previewdataframeanalytics diff --git a/typedapi/ml/previewdataframeanalytics/request.go b/typedapi/ml/previewdataframeanalytics/request.go index 350a442edb..f5a174bd25 100644 --- a/typedapi/ml/previewdataframeanalytics/request.go +++ b/typedapi/ml/previewdataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package previewdataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsRequest.ts#L24-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsRequest.ts#L24-L47 type Request struct { // Config A data frame analytics config as described in create data frame analytics diff --git a/typedapi/ml/previewdataframeanalytics/response.go b/typedapi/ml/previewdataframeanalytics/response.go index a5225ad1a8..95baac521a 100644 --- a/typedapi/ml/previewdataframeanalytics/response.go +++ b/typedapi/ml/previewdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package previewdataframeanalytics // Response holds the response body struct for the package previewdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsResponse.ts#L23-L28 type Response struct { // FeatureValues An array of objects that contain feature name and value pairs. The features diff --git a/typedapi/ml/putcalendar/put_calendar.go b/typedapi/ml/putcalendar/put_calendar.go index 96a4dd1485..5a2afca81d 100644 --- a/typedapi/ml/putcalendar/put_calendar.go +++ b/typedapi/ml/putcalendar/put_calendar.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Instantiates a calendar. package putcalendar diff --git a/typedapi/ml/putcalendar/request.go b/typedapi/ml/putcalendar/request.go index 7528f33689..b34c38cdaf 100644 --- a/typedapi/ml/putcalendar/request.go +++ b/typedapi/ml/putcalendar/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putcalendar @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putcalendar // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_calendar/MlPutCalendarRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_calendar/MlPutCalendarRequest.ts#L23-L43 type Request struct { // Description A description of the calendar. diff --git a/typedapi/ml/putcalendar/response.go b/typedapi/ml/putcalendar/response.go index 03b3136ffd..ac2a384d87 100644 --- a/typedapi/ml/putcalendar/response.go +++ b/typedapi/ml/putcalendar/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putcalendar @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Response holds the response body struct for the package putcalendar // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_calendar/MlPutCalendarResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_calendar/MlPutCalendarResponse.ts#L22-L31 type Response struct { // CalendarId A string that uniquely identifies a calendar. @@ -63,13 +64,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "calendar_id": if err := dec.Decode(&s.CalendarId); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarId", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,13 +85,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } s.JobIds = append(s.JobIds, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.JobIds); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } } diff --git a/typedapi/ml/putcalendarjob/put_calendar_job.go b/typedapi/ml/putcalendarjob/put_calendar_job.go index 5408c82961..10d359547d 100644 --- a/typedapi/ml/putcalendarjob/put_calendar_job.go +++ b/typedapi/ml/putcalendarjob/put_calendar_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds an anomaly detection job to a calendar. package putcalendarjob diff --git a/typedapi/ml/putcalendarjob/response.go b/typedapi/ml/putcalendarjob/response.go index d591150411..bf09d09a19 100644 --- a/typedapi/ml/putcalendarjob/response.go +++ b/typedapi/ml/putcalendarjob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putcalendarjob @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Response holds the response body struct for the package putcalendarjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_calendar_job/MlPutCalendarJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_calendar_job/MlPutCalendarJobResponse.ts#L22-L31 type Response struct { // CalendarId A string that uniquely identifies a calendar. 
@@ -63,13 +64,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "calendar_id": if err := dec.Decode(&s.CalendarId); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarId", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,13 +85,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } s.JobIds = append(s.JobIds, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.JobIds); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } } diff --git a/typedapi/ml/putdatafeed/put_datafeed.go b/typedapi/ml/putdatafeed/put_datafeed.go index 14b4bdaa8d..38f7f8ca48 100644 --- a/typedapi/ml/putdatafeed/put_datafeed.go +++ b/typedapi/ml/putdatafeed/put_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Instantiates a datafeed. package putdatafeed diff --git a/typedapi/ml/putdatafeed/request.go b/typedapi/ml/putdatafeed/request.go index cbdb0a35df..22f4e4aff9 100644 --- a/typedapi/ml/putdatafeed/request.go +++ b/typedapi/ml/putdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putdatafeed @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_datafeed/MlPutDatafeedRequest.ts#L37-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_datafeed/MlPutDatafeedRequest.ts#L37-L172 type Request struct { // Aggregations If set, the datafeed performs aggregation searches. 
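Across these response decoders the bare `return err` statements become `fmt.Errorf("%s | %w", "<Field>", err)`, so a decode failure names the struct field while `%w` keeps the underlying error reachable through errors.Is/errors.As. A small sketch of what a caller sees, using a deliberately malformed payload:

package main

import (
	"encoding/json"
	"errors"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putcalendarjob"
)

func main() {
	var resp putcalendarjob.Response
	// job_ids must be a string or an array of strings; a number trips the decoder.
	err := json.Unmarshal([]byte(`{"calendar_id":"planned-outages","job_ids":42}`), &resp)

	// The field name now travels with the error, e.g.
	// "JobIds | json: cannot unmarshal number into Go value of type string".
	fmt.Println(err)

	// %w keeps the original error available to errors.As.
	var typeErr *json.UnmarshalTypeError
	fmt.Println(errors.As(err, &typeErr)) // true
}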
@@ -155,27 +155,27 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "chunking_config": if err := dec.Decode(&s.ChunkingConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ChunkingConfig", err) } case "delayed_data_check_config": if err := dec.Decode(&s.DelayedDataCheckConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DelayedDataCheckConfig", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "headers": if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "indices", "indexes": @@ -184,24 +184,24 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "indices_options": if err := dec.Decode(&s.IndicesOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesOptions", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "max_empty_searches": @@ -212,7 +212,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxEmptySearches", err) } s.MaxEmptySearches = &value case float64: @@ -222,17 +222,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "query_delay": if err := dec.Decode(&s.QueryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryDelay", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -240,7 +240,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]types.ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "scroll_size": @@ -251,7 +251,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollSize", err) } s.ScrollSize = &value case float64: diff --git a/typedapi/ml/putdatafeed/response.go b/typedapi/ml/putdatafeed/response.go index a687d490b0..27c35d1fd9 100644 --- a/typedapi/ml/putdatafeed/response.go +++ b/typedapi/ml/putdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
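The put_datafeed request decoder shown above keeps its lenient behaviour — the legacy `indexes` key and string-typed numerics are still accepted — with the new field-named error wrapping layered on top. A rough illustration; the payload values are invented:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putdatafeed"
)

func main() {
	var req putdatafeed.Request
	// "indexes" is folded into Indices, and a quoted scroll_size is parsed
	// with strconv.Atoi, exactly as the switch cases in the hunk show.
	payload := `{"indexes":"kibana_sample_data_logs","scroll_size":"500"}`
	if err := json.Unmarshal([]byte(payload), &req); err != nil {
		panic(err)
	}
	fmt.Println(req.Indices, *req.ScrollSize) // [kibana_sample_data_logs] 500
}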
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putdatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_datafeed/MlPutDatafeedResponse.ts#L31-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_datafeed/MlPutDatafeedResponse.ts#L31-L49 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Authorization *types.DatafeedAuthorization `json:"authorization,omitempty"` diff --git a/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go b/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go index 3b259bdb37..87c78adc75 100644 --- a/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go +++ b/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Instantiates a data frame analytics job. package putdataframeanalytics diff --git a/typedapi/ml/putdataframeanalytics/request.go b/typedapi/ml/putdataframeanalytics/request.go index a2bc9e8e60..ac391b9be3 100644 --- a/typedapi/ml/putdataframeanalytics/request.go +++ b/typedapi/ml/putdataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putdataframeanalytics @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsRequest.ts#L30-L141 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsRequest.ts#L30-L141 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine @@ -138,7 +138,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLazyStart", err) } s.AllowLazyStart = &value case bool: @@ -147,18 +147,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "analysis": if err := dec.Decode(&s.Analysis); err != nil { - return err + return fmt.Errorf("%s | %w", "Analysis", err) } case "analyzed_fields": if err := dec.Decode(&s.AnalyzedFields); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyzedFields", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -169,12 +169,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "headers": if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "max_num_threads": @@ -185,7 +185,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNumThreads", err) } s.MaxNumThreads = &value case float64: @@ -196,7 +196,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -207,12 +207,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/ml/putdataframeanalytics/response.go b/typedapi/ml/putdataframeanalytics/response.go index e7171782c2..b2a118a5dc 100644 --- a/typedapi/ml/putdataframeanalytics/response.go +++ b/typedapi/ml/putdataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putdataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsResponse.ts#L31-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsResponse.ts#L31-L46 type Response struct { AllowLazyStart bool `json:"allow_lazy_start"` Analysis types.DataframeAnalysisContainer `json:"analysis"` diff --git a/typedapi/ml/putfilter/put_filter.go b/typedapi/ml/putfilter/put_filter.go index 759daa3318..d9f8836488 100644 --- a/typedapi/ml/putfilter/put_filter.go +++ b/typedapi/ml/putfilter/put_filter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Instantiates a filter. package putfilter diff --git a/typedapi/ml/putfilter/request.go b/typedapi/ml/putfilter/request.go index 279c4dbe12..afabf32068 100644 --- a/typedapi/ml/putfilter/request.go +++ b/typedapi/ml/putfilter/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putfilter @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putfilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_filter/MlPutFilterRequest.ts#L23-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_filter/MlPutFilterRequest.ts#L23-L50 type Request struct { // Description A description of the filter. diff --git a/typedapi/ml/putfilter/response.go b/typedapi/ml/putfilter/response.go index 06f547ec10..cf1afce63b 100644 --- a/typedapi/ml/putfilter/response.go +++ b/typedapi/ml/putfilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putfilter // Response holds the response body struct for the package putfilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_filter/MlPutFilterResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_filter/MlPutFilterResponse.ts#L22-L28 type Response struct { Description string `json:"description"` FilterId string `json:"filter_id"` diff --git a/typedapi/ml/putjob/put_job.go b/typedapi/ml/putjob/put_job.go index 13f83ee566..ddd4cd75c0 100644 --- a/typedapi/ml/putjob/put_job.go +++ b/typedapi/ml/putjob/put_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Instantiates an anomaly detection job. package putjob diff --git a/typedapi/ml/putjob/request.go b/typedapi/ml/putjob/request.go index c55d27d5f7..cbb7940f9e 100644 --- a/typedapi/ml/putjob/request.go +++ b/typedapi/ml/putjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putjob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_job/MlPutJobRequest.ts#L30-L111 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_job/MlPutJobRequest.ts#L30-L111 type Request struct { // AllowLazyOpen Advanced configuration option. 
Specifies whether this job can open when there @@ -156,7 +156,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLazyOpen", err) } s.AllowLazyOpen = &value case bool: @@ -165,22 +165,22 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "analysis_config": if err := dec.Decode(&s.AnalysisConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisConfig", err) } case "analysis_limits": if err := dec.Decode(&s.AnalysisLimits); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisLimits", err) } case "background_persist_interval": if err := dec.Decode(&s.BackgroundPersistInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundPersistInterval", err) } case "custom_settings": if err := dec.Decode(&s.CustomSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomSettings", err) } case "daily_model_snapshot_retention_after_days": @@ -190,7 +190,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DailyModelSnapshotRetentionAfterDays", err) } s.DailyModelSnapshotRetentionAfterDays = &value case float64: @@ -200,18 +200,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "data_description": if err := dec.Decode(&s.DataDescription); err != nil { - return err + return fmt.Errorf("%s | %w", "DataDescription", err) } case "datafeed_config": if err := dec.Decode(&s.DatafeedConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DatafeedConfig", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -222,12 +222,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "groups": if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "model_plot_config": if err := dec.Decode(&s.ModelPlotConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPlotConfig", err) } case "model_snapshot_retention_days": @@ -237,7 +237,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotRetentionDays", err) } s.ModelSnapshotRetentionDays = &value case float64: @@ -252,7 +252,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RenormalizationWindowDays", err) } s.RenormalizationWindowDays = &value case float64: @@ -262,7 +262,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "results_index_name": if err := dec.Decode(&s.ResultsIndexName); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsIndexName", err) } case "results_retention_days": @@ -272,7 +272,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsRetentionDays", err) } s.ResultsRetentionDays = &value case float64: diff --git a/typedapi/ml/putjob/response.go b/typedapi/ml/putjob/response.go index 811d79346b..247aca61f9 100644 --- a/typedapi/ml/putjob/response.go +++ b/typedapi/ml/putjob/response.go @@ 
-16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putjob @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_job/MlPutJobResponse.ts#L29-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_job/MlPutJobResponse.ts#L29-L52 type Response struct { AllowLazyOpen bool `json:"allow_lazy_open"` AnalysisConfig types.AnalysisConfigRead `json:"analysis_config"` diff --git a/typedapi/ml/puttrainedmodel/put_trained_model.go b/typedapi/ml/puttrainedmodel/put_trained_model.go index 9bd915bc31..8eb71e14ad 100644 --- a/typedapi/ml/puttrainedmodel/put_trained_model.go +++ b/typedapi/ml/puttrainedmodel/put_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates an inference trained model. package puttrainedmodel @@ -437,6 +437,15 @@ func (r *PutTrainedModel) PlatformArchitecture(platformarchitecture string) *Put return r } +// PrefixStrings Optional prefix strings applied at inference +// API name: prefix_strings +func (r *PutTrainedModel) PrefixStrings(prefixstrings *types.TrainedModelPrefixStrings) *PutTrainedModel { + + r.req.PrefixStrings = prefixstrings + + return r +} + // Tags An array of tags to organize the model. // API name: tags func (r *PutTrainedModel) Tags(tags ...string) *PutTrainedModel { diff --git a/typedapi/ml/puttrainedmodel/request.go b/typedapi/ml/puttrainedmodel/request.go index b8a5b8b2ba..457dc2774f 100644 --- a/typedapi/ml/puttrainedmodel/request.go +++ b/typedapi/ml/puttrainedmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodel @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package puttrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/MlPutTrainedModelRequest.ts#L28-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/MlPutTrainedModelRequest.ts#L29-L113 type Request struct { // CompressedDefinition The compressed (GZipped and Base64 encoded) inference definition of the @@ -68,6 +68,8 @@ type Request struct { // processor // architecture or OS features), leave this field unset. PlatformArchitecture *string `json:"platform_architecture,omitempty"` + // PrefixStrings Optional prefix strings applied at inference + PrefixStrings *types.TrainedModelPrefixStrings `json:"prefix_strings,omitempty"` // Tags An array of tags to organize the model. 
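Beyond the hash bumps, put_trained_model gains a `prefix_strings` body field and a matching PrefixStrings builder on the typed request. A tentative sketch of populating the field directly on the request struct; the Ingest/Search members are assumed from how the specification describes TrainedModelPrefixStrings and are not confirmed by this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/puttrainedmodel"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func strPtr(s string) *string { return &s }

func main() {
	req := puttrainedmodel.Request{
		// Ingest/Search are assumed optional strings per the spec's
		// TrainedModelPrefixStrings definition.
		PrefixStrings: &types.TrainedModelPrefixStrings{
			Ingest: strPtr("passage: "),
			Search: strPtr("query: "),
		},
	}
	body, _ := json.Marshal(req)
	// The body should include "prefix_strings":{"ingest":"passage: ","search":"query: "}.
	fmt.Println(string(body))
}

The fluent PutTrainedModel.PrefixStrings method added in the same hunk sets this same field when the builder-style API is used instead.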
Tags []string `json:"tags,omitempty"` } diff --git a/typedapi/ml/puttrainedmodel/response.go b/typedapi/ml/puttrainedmodel/response.go index 497dc80c23..498f2a265a 100644 --- a/typedapi/ml/puttrainedmodel/response.go +++ b/typedapi/ml/puttrainedmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodel @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package puttrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/MlPutTrainedModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/MlPutTrainedModelResponse.ts#L22-L24 type Response struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` // CreateTime The time when the trained model was created. diff --git a/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go b/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go index 64ec8d6aaf..70219d3c3c 100644 --- a/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go +++ b/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new model alias (or reassigns an existing one) to refer to the // trained model diff --git a/typedapi/ml/puttrainedmodelalias/response.go b/typedapi/ml/puttrainedmodelalias/response.go index f83ee804a5..c6c0300a63 100644 --- a/typedapi/ml/puttrainedmodelalias/response.go +++ b/typedapi/ml/puttrainedmodelalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodelalias // Response holds the response body struct for the package puttrainedmodelalias // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model_alias/MlPutTrainedModelAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model_alias/MlPutTrainedModelAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go b/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go index ec854fe31b..2a02b7e138 100644 --- a/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates part of a trained model definition package puttrainedmodeldefinitionpart diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/request.go b/typedapi/ml/puttrainedmodeldefinitionpart/request.go index f1ad7f921d..b578f6a19a 100644 --- a/typedapi/ml/puttrainedmodeldefinitionpart/request.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodeldefinitionpart @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package puttrainedmodeldefinitionpart // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartRequest.ts#L24-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartRequest.ts#L24-L57 type Request struct { // Definition The definition part for the model. Must be a base64 encoded string. diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/response.go b/typedapi/ml/puttrainedmodeldefinitionpart/response.go index 49735ac37b..3dd3ec8925 100644 --- a/typedapi/ml/puttrainedmodeldefinitionpart/response.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodeldefinitionpart // Response holds the response body struct for the package puttrainedmodeldefinitionpart // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go b/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go index 89aa133c9c..3f3a96edb7 100644 --- a/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go +++ b/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a trained model vocabulary package puttrainedmodelvocabulary diff --git a/typedapi/ml/puttrainedmodelvocabulary/request.go b/typedapi/ml/puttrainedmodelvocabulary/request.go index c1611a1ee3..eeff33254f 100644 --- a/typedapi/ml/puttrainedmodelvocabulary/request.go +++ b/typedapi/ml/puttrainedmodelvocabulary/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodelvocabulary @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package puttrainedmodelvocabulary // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyRequest.ts#L24-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyRequest.ts#L24-L60 type Request struct { // Merges The optional model merges if required by the tokenizer. diff --git a/typedapi/ml/puttrainedmodelvocabulary/response.go b/typedapi/ml/puttrainedmodelvocabulary/response.go index 55457d47da..6b71899307 100644 --- a/typedapi/ml/puttrainedmodelvocabulary/response.go +++ b/typedapi/ml/puttrainedmodelvocabulary/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttrainedmodelvocabulary // Response holds the response body struct for the package puttrainedmodelvocabulary // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/resetjob/reset_job.go b/typedapi/ml/resetjob/reset_job.go index aa5e9931da..4c68867dd6 100644 --- a/typedapi/ml/resetjob/reset_job.go +++ b/typedapi/ml/resetjob/reset_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Resets an existing anomaly detection job. package resetjob diff --git a/typedapi/ml/resetjob/response.go b/typedapi/ml/resetjob/response.go index a995e5b4c9..5ff3f5b71c 100644 --- a/typedapi/ml/resetjob/response.go +++ b/typedapi/ml/resetjob/response.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resetjob // Response holds the response body struct for the package resetjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/reset_job/MlResetJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/reset_job/MlResetJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/revertmodelsnapshot/request.go b/typedapi/ml/revertmodelsnapshot/request.go index 30d49d647a..1fef11a51e 100644 --- a/typedapi/ml/revertmodelsnapshot/request.go +++ b/typedapi/ml/revertmodelsnapshot/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package revertmodelsnapshot @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package revertmodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/revert_model_snapshot/MlRevertModelSnapshotRequest.ts#L23-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/revert_model_snapshot/MlRevertModelSnapshotRequest.ts#L23-L69 type Request struct { // DeleteInterveningResults Refer to the description for the `delete_intervening_results` query diff --git a/typedapi/ml/revertmodelsnapshot/response.go b/typedapi/ml/revertmodelsnapshot/response.go index f8f62ede49..55fd2b8b7a 100644 --- a/typedapi/ml/revertmodelsnapshot/response.go +++ b/typedapi/ml/revertmodelsnapshot/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package revertmodelsnapshot @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package revertmodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/revert_model_snapshot/MlRevertModelSnapshotResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/revert_model_snapshot/MlRevertModelSnapshotResponse.ts#L22-L24 type Response struct { Model types.ModelSnapshot `json:"model"` } diff --git a/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go b/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go index bbe510a94e..19ddef59ec 100644 --- a/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go +++ b/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Reverts to a specific snapshot. package revertmodelsnapshot diff --git a/typedapi/ml/setupgrademode/response.go b/typedapi/ml/setupgrademode/response.go index d433c17e3d..e5ab889574 100644 --- a/typedapi/ml/setupgrademode/response.go +++ b/typedapi/ml/setupgrademode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package setupgrademode // Response holds the response body struct for the package setupgrademode // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/set_upgrade_mode/MlSetUpgradeModeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/set_upgrade_mode/MlSetUpgradeModeResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/setupgrademode/set_upgrade_mode.go b/typedapi/ml/setupgrademode/set_upgrade_mode.go index 44c137752b..ae33115095 100644 --- a/typedapi/ml/setupgrademode/set_upgrade_mode.go +++ b/typedapi/ml/setupgrademode/set_upgrade_mode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Sets a cluster wide upgrade_mode setting that prepares machine learning // indices for an upgrade. diff --git a/typedapi/ml/startdatafeed/request.go b/typedapi/ml/startdatafeed/request.go index de777815d1..ea127a9fb3 100644 --- a/typedapi/ml/startdatafeed/request.go +++ b/typedapi/ml/startdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package startdatafeed @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package startdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/start_datafeed/MlStartDatafeedRequest.ts#L24-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/start_datafeed/MlStartDatafeedRequest.ts#L24-L91 type Request struct { // End Refer to the description for the `end` query parameter. 
@@ -77,17 +77,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/ml/startdatafeed/response.go b/typedapi/ml/startdatafeed/response.go index 4078b1d6d8..e8685a23cb 100644 --- a/typedapi/ml/startdatafeed/response.go +++ b/typedapi/ml/startdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package startdatafeed @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Response holds the response body struct for the package startdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/start_datafeed/MlStartDatafeedResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/start_datafeed/MlStartDatafeedResponse.ts#L22-L34 type Response struct { // Node The ID of the node that the job was started on. In serverless this will be @@ -69,13 +70,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } s.Node = append(s.Node, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } } @@ -86,7 +87,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Started", err) } s.Started = value case bool: diff --git a/typedapi/ml/startdatafeed/start_datafeed.go b/typedapi/ml/startdatafeed/start_datafeed.go index 4d3ba84d62..48cafb486c 100644 --- a/typedapi/ml/startdatafeed/start_datafeed.go +++ b/typedapi/ml/startdatafeed/start_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Starts one or more datafeeds. package startdatafeed diff --git a/typedapi/ml/startdataframeanalytics/response.go b/typedapi/ml/startdataframeanalytics/response.go index 16a2b13124..f194b27899 100644 --- a/typedapi/ml/startdataframeanalytics/response.go +++ b/typedapi/ml/startdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
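The start_datafeed response decoder above also normalizes union-typed fields: `node` may arrive as a single ID or a list, and `started` as a native or quoted boolean, with failures now wrapped under the field name. A brief sketch, with invented values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/startdatafeed"
)

func main() {
	var resp startdatafeed.Response
	// A bare node ID is appended to the Node slice, and "true" is parsed
	// with strconv.ParseBool, as the hunk shows.
	err := json.Unmarshal([]byte(`{"node":"instance-0000000001","started":"true"}`), &resp)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.Node, resp.Started) // [instance-0000000001] true
}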
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package startdataframeanalytics // Response holds the response body struct for the package startdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/start_data_frame_analytics/MlStartDataFrameAnalyticsResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/start_data_frame_analytics/MlStartDataFrameAnalyticsResponse.ts#L22-L34 type Response struct { Acknowledged bool `json:"acknowledged"` // Node The ID of the node that the job was started on. If the job is allowed to open diff --git a/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go b/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go index dbfde41523..9e2679b3b6 100644 --- a/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go +++ b/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Starts a data frame analytics job. package startdataframeanalytics diff --git a/typedapi/ml/starttrainedmodeldeployment/response.go b/typedapi/ml/starttrainedmodeldeployment/response.go index ef7fe782fc..61b1024a4b 100644 --- a/typedapi/ml/starttrainedmodeldeployment/response.go +++ b/typedapi/ml/starttrainedmodeldeployment/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package starttrainedmodeldeployment @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package starttrainedmodeldeployment // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/start_trained_model_deployment/MlStartTrainedModelDeploymentResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/start_trained_model_deployment/MlStartTrainedModelDeploymentResponse.ts#L22-L26 type Response struct { Assignment types.TrainedModelAssignment `json:"assignment"` } diff --git a/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go b/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go index 0541511c70..689737e9d7 100644 --- a/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go +++ b/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Start a trained model deployment. 
package starttrainedmodeldeployment diff --git a/typedapi/ml/stopdatafeed/request.go b/typedapi/ml/stopdatafeed/request.go index cff6c62376..fe106cffd9 100644 --- a/typedapi/ml/stopdatafeed/request.go +++ b/typedapi/ml/stopdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stopdatafeed @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package stopdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/stop_datafeed/MlStopDatafeedRequest.ts#L24-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/stop_datafeed/MlStopDatafeedRequest.ts#L24-L78 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. @@ -83,7 +83,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowNoMatch", err) } s.AllowNoMatch = &value case bool: @@ -97,7 +97,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Force", err) } s.Force = &value case bool: @@ -106,7 +106,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/ml/stopdatafeed/response.go b/typedapi/ml/stopdatafeed/response.go index 97562d4480..61707a1b37 100644 --- a/typedapi/ml/stopdatafeed/response.go +++ b/typedapi/ml/stopdatafeed/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stopdatafeed // Response holds the response body struct for the package stopdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/stop_datafeed/MlStopDatafeedResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/stop_datafeed/MlStopDatafeedResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/ml/stopdatafeed/stop_datafeed.go b/typedapi/ml/stopdatafeed/stop_datafeed.go index 36d75bb24e..fca17ab74a 100644 --- a/typedapi/ml/stopdatafeed/stop_datafeed.go +++ b/typedapi/ml/stopdatafeed/stop_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stops one or more datafeeds. 
package stopdatafeed diff --git a/typedapi/ml/stopdataframeanalytics/response.go b/typedapi/ml/stopdataframeanalytics/response.go index 0ee4ee648b..19f7346f78 100644 --- a/typedapi/ml/stopdataframeanalytics/response.go +++ b/typedapi/ml/stopdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stopdataframeanalytics // Response holds the response body struct for the package stopdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/stop_data_frame_analytics/MlStopDataFrameAnalyticsResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/stop_data_frame_analytics/MlStopDataFrameAnalyticsResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go b/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go index a77df36191..2991c39f71 100644 --- a/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go +++ b/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stops one or more data frame analytics jobs. package stopdataframeanalytics diff --git a/typedapi/ml/stoptrainedmodeldeployment/response.go b/typedapi/ml/stoptrainedmodeldeployment/response.go index cd3e30ec84..cc038c32cb 100644 --- a/typedapi/ml/stoptrainedmodeldeployment/response.go +++ b/typedapi/ml/stoptrainedmodeldeployment/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stoptrainedmodeldeployment // Response holds the response body struct for the package stoptrainedmodeldeployment // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/stop_trained_model_deployment/MlStopTrainedModelDeploymentResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/stop_trained_model_deployment/MlStopTrainedModelDeploymentResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go b/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go index f4b4004377..ef7cbe0538 100644 --- a/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go +++ b/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stop a trained model deployment. package stoptrainedmodeldeployment diff --git a/typedapi/ml/updatedatafeed/request.go b/typedapi/ml/updatedatafeed/request.go index a7a5e79af6..6381f98512 100644 --- a/typedapi/ml/updatedatafeed/request.go +++ b/typedapi/ml/updatedatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatedatafeed @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package updatedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_datafeed/MlUpdateDatafeedRequest.ts#L31-L162 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_datafeed/MlUpdateDatafeedRequest.ts#L31-L162 type Request struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations @@ -157,37 +157,37 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "chunking_config": if err := dec.Decode(&s.ChunkingConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ChunkingConfig", err) } case "delayed_data_check_config": if err := dec.Decode(&s.DelayedDataCheckConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DelayedDataCheckConfig", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "indices", "indexes": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "indices_options": if err := dec.Decode(&s.IndicesOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesOptions", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "max_empty_searches": @@ -198,7 +198,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxEmptySearches", err) } s.MaxEmptySearches = &value case float64: @@ -208,17 +208,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "query_delay": if err := dec.Decode(&s.QueryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryDelay", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -226,7 +226,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]types.ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "scroll_size": @@ 
-237,7 +237,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollSize", err) } s.ScrollSize = &value case float64: diff --git a/typedapi/ml/updatedatafeed/response.go b/typedapi/ml/updatedatafeed/response.go index d01b89aae0..cdd865b7d1 100644 --- a/typedapi/ml/updatedatafeed/response.go +++ b/typedapi/ml/updatedatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatedatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_datafeed/MlUpdateDatafeedResponse.ts#L31-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_datafeed/MlUpdateDatafeedResponse.ts#L31-L49 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Authorization *types.DatafeedAuthorization `json:"authorization,omitempty"` diff --git a/typedapi/ml/updatedatafeed/update_datafeed.go b/typedapi/ml/updatedatafeed/update_datafeed.go index 08f734c34b..08b630f4af 100644 --- a/typedapi/ml/updatedatafeed/update_datafeed.go +++ b/typedapi/ml/updatedatafeed/update_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates certain properties of a datafeed. package updatedatafeed diff --git a/typedapi/ml/updatedataframeanalytics/request.go b/typedapi/ml/updatedataframeanalytics/request.go index 0b5fbc17c3..26f2e8efdb 100644 --- a/typedapi/ml/updatedataframeanalytics/request.go +++ b/typedapi/ml/updatedataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
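The updatedatafeed request decoder above also shows the lenient numeric handling that the new error wrapping annotates: `max_empty_searches` and `scroll_size` may arrive as a JSON number or as a quoted string, and the string branch goes through `strconv.Atoi` before the value is stored. A sketch under the assumption of a hypothetical `scrollSettings` type:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// scrollSettings is a hypothetical example type; only the number-or-string
// decoding of scroll_size is reproduced here.
type scrollSettings struct {
	ScrollSize *int `json:"scroll_size,omitempty"`
}

func (s *scrollSettings) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	msg, ok := raw["scroll_size"]
	if !ok {
		return nil
	}
	var v interface{}
	if err := json.Unmarshal(msg, &v); err != nil {
		return fmt.Errorf("%s | %w", "ScrollSize", err)
	}
	switch t := v.(type) {
	case string:
		// Quoted numbers are parsed; a bad value reports the field name.
		value, err := strconv.Atoi(t)
		if err != nil {
			return fmt.Errorf("%s | %w", "ScrollSize", err)
		}
		s.ScrollSize = &value
	case float64:
		// Plain JSON numbers decode as float64 through interface{}.
		value := int(t)
		s.ScrollSize = &value
	}
	return nil
}

func main() {
	var a, b scrollSettings
	_ = json.Unmarshal([]byte(`{"scroll_size":1000}`), &a)
	_ = json.Unmarshal([]byte(`{"scroll_size":"1000"}`), &b)
	fmt.Println(*a.ScrollSize, *b.ScrollSize)
}
```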
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatedataframeanalytics @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsRequest.ts#L24-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsRequest.ts#L24-L72 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine diff --git a/typedapi/ml/updatedataframeanalytics/response.go b/typedapi/ml/updatedataframeanalytics/response.go index 51bbb70195..6caacbca21 100644 --- a/typedapi/ml/updatedataframeanalytics/response.go +++ b/typedapi/ml/updatedataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatedataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsResponse.ts#L30-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsResponse.ts#L30-L45 type Response struct { AllowLazyStart bool `json:"allow_lazy_start"` Analysis types.DataframeAnalysisContainer `json:"analysis"` diff --git a/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go b/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go index 6e0978addb..46340c3c2e 100644 --- a/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go +++ b/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates certain properties of a data frame analytics job. package updatedataframeanalytics diff --git a/typedapi/ml/updatefilter/request.go b/typedapi/ml/updatefilter/request.go index 59081073c1..0c26f74d25 100644 --- a/typedapi/ml/updatefilter/request.go +++ b/typedapi/ml/updatefilter/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatefilter @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatefilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_filter/MlUpdateFilterRequest.ts#L23-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_filter/MlUpdateFilterRequest.ts#L23-L51 type Request struct { // AddItems The items to add to the filter. diff --git a/typedapi/ml/updatefilter/response.go b/typedapi/ml/updatefilter/response.go index 7996380ac5..067f547158 100644 --- a/typedapi/ml/updatefilter/response.go +++ b/typedapi/ml/updatefilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatefilter // Response holds the response body struct for the package updatefilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_filter/MlUpdateFilterResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_filter/MlUpdateFilterResponse.ts#L22-L28 type Response struct { Description string `json:"description"` FilterId string `json:"filter_id"` diff --git a/typedapi/ml/updatefilter/update_filter.go b/typedapi/ml/updatefilter/update_filter.go index 499d1b2460..27b2b4a477 100644 --- a/typedapi/ml/updatefilter/update_filter.go +++ b/typedapi/ml/updatefilter/update_filter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the description of a filter, adds items, or removes items. package updatefilter diff --git a/typedapi/ml/updatejob/request.go b/typedapi/ml/updatejob/request.go index 6ca66ece6f..7e68a54b1e 100644 --- a/typedapi/ml/updatejob/request.go +++ b/typedapi/ml/updatejob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatejob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package updatejob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_job/MlUpdateJobRequest.ts#L33-L138 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_job/MlUpdateJobRequest.ts#L33-L138 type Request struct { // AllowLazyOpen Advanced configuration option. 
Specifies whether this job can open when @@ -139,7 +139,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLazyOpen", err) } s.AllowLazyOpen = &value case bool: @@ -148,17 +148,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "analysis_limits": if err := dec.Decode(&s.AnalysisLimits); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisLimits", err) } case "background_persist_interval": if err := dec.Decode(&s.BackgroundPersistInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundPersistInterval", err) } case "categorization_filters": if err := dec.Decode(&s.CategorizationFilters); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationFilters", err) } case "custom_settings": @@ -166,7 +166,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.CustomSettings = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.CustomSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomSettings", err) } case "daily_model_snapshot_retention_after_days": @@ -176,7 +176,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DailyModelSnapshotRetentionAfterDays", err) } s.DailyModelSnapshotRetentionAfterDays = &value case float64: @@ -187,7 +187,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -198,22 +198,22 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "detectors": if err := dec.Decode(&s.Detectors); err != nil { - return err + return fmt.Errorf("%s | %w", "Detectors", err) } case "groups": if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "model_plot_config": if err := dec.Decode(&s.ModelPlotConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPlotConfig", err) } case "model_prune_window": if err := dec.Decode(&s.ModelPruneWindow); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPruneWindow", err) } case "model_snapshot_retention_days": @@ -223,7 +223,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotRetentionDays", err) } s.ModelSnapshotRetentionDays = &value case float64: @@ -233,7 +233,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "per_partition_categorization": if err := dec.Decode(&s.PerPartitionCategorization); err != nil { - return err + return fmt.Errorf("%s | %w", "PerPartitionCategorization", err) } case "renormalization_window_days": @@ -243,7 +243,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RenormalizationWindowDays", err) } s.RenormalizationWindowDays = &value case float64: @@ -258,7 +258,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsRetentionDays", err) } s.ResultsRetentionDays = &value case float64: diff --git 
a/typedapi/ml/updatejob/response.go b/typedapi/ml/updatejob/response.go index b3762c505c..548a9aca24 100644 --- a/typedapi/ml/updatejob/response.go +++ b/typedapi/ml/updatejob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatejob @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatejob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_job/MlUpdateJobResponse.ts#L29-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_job/MlUpdateJobResponse.ts#L29-L53 type Response struct { AllowLazyOpen bool `json:"allow_lazy_open"` AnalysisConfig types.AnalysisConfigRead `json:"analysis_config"` diff --git a/typedapi/ml/updatejob/update_job.go b/typedapi/ml/updatejob/update_job.go index cdd5f04f70..eba852d05d 100644 --- a/typedapi/ml/updatejob/update_job.go +++ b/typedapi/ml/updatejob/update_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates certain properties of an anomaly detection job. package updatejob diff --git a/typedapi/ml/updatemodelsnapshot/request.go b/typedapi/ml/updatemodelsnapshot/request.go index 37777ae94b..f6359fe9ce 100644 --- a/typedapi/ml/updatemodelsnapshot/request.go +++ b/typedapi/ml/updatemodelsnapshot/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatemodelsnapshot @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_model_snapshot/MlUpdateModelSnapshotRequest.ts#L23-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_model_snapshot/MlUpdateModelSnapshotRequest.ts#L23-L54 type Request struct { // Description A description of the model snapshot. diff --git a/typedapi/ml/updatemodelsnapshot/response.go b/typedapi/ml/updatemodelsnapshot/response.go index 0d8ea3b1e4..78cea3cbb1 100644 --- a/typedapi/ml/updatemodelsnapshot/response.go +++ b/typedapi/ml/updatemodelsnapshot/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
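Because the new wrapping uses `%w`, the underlying `encoding/json` error stays in the chain; callers get the field name in the message without losing the ability to match on the typed error. A short sketch, where the simulated failure and the `analysisLimits` type stand in for whatever a generated `UnmarshalJSON` would actually be decoding:

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// analysisLimits is a hypothetical stand-in for the typed sub-struct that a
// generated decoder would try to fill from the "analysis_limits" property.
type analysisLimits struct {
	ModelMemoryLimit string `json:"model_memory_limit"`
}

func main() {
	// Simulate the wrapped error a generated UnmarshalJSON now returns when
	// the property holds the wrong JSON type (a string instead of an object).
	var limits analysisLimits
	inner := json.Unmarshal([]byte(`"not-an-object"`), &limits)
	wrapped := fmt.Errorf("%s | %w", "AnalysisLimits", inner)

	// The failing field is readable in the message...
	fmt.Println(wrapped)

	// ...and the original typed error is still reachable through the chain.
	var typeErr *json.UnmarshalTypeError
	if errors.As(wrapped, &typeErr) {
		fmt.Println("unexpected JSON value of type:", typeErr.Value)
	}
}
```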
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatemodelsnapshot @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/update_model_snapshot/MlUpdateModelSnapshotResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/update_model_snapshot/MlUpdateModelSnapshotResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` Model types.ModelSnapshot `json:"model"` diff --git a/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go b/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go index 4aa79ca837..298afcfc40 100644 --- a/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go +++ b/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates certain properties of a snapshot. package updatemodelsnapshot diff --git a/typedapi/ml/upgradejobsnapshot/response.go b/typedapi/ml/upgradejobsnapshot/response.go index eb2db8690b..6be9aaf440 100644 --- a/typedapi/ml/upgradejobsnapshot/response.go +++ b/typedapi/ml/upgradejobsnapshot/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package upgradejobsnapshot // Response holds the response body struct for the package upgradejobsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/upgrade_job_snapshot/MlUpgradeJobSnapshotResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/upgrade_job_snapshot/MlUpgradeJobSnapshotResponse.ts#L22-L31 type Response struct { // Completed When true, this means the task is complete. When false, it is still running. diff --git a/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go b/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go index 0e48cd6e3a..73ee2a5ecb 100644 --- a/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go +++ b/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Upgrades a given job snapshot to the current major version. 
package upgradejobsnapshot diff --git a/typedapi/ml/validate/request.go b/typedapi/ml/validate/request.go index 32c87d4ab3..cb7fced09b 100644 --- a/typedapi/ml/validate/request.go +++ b/typedapi/ml/validate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package validate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package validate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/validate/MlValidateJobRequest.ts#L27-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/validate/MlValidateJobRequest.ts#L27-L44 type Request struct { AnalysisConfig *types.AnalysisConfig `json:"analysis_config,omitempty"` AnalysisLimits *types.AnalysisLimits `json:"analysis_limits,omitempty"` @@ -80,23 +80,23 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "analysis_config": if err := dec.Decode(&s.AnalysisConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisConfig", err) } case "analysis_limits": if err := dec.Decode(&s.AnalysisLimits); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisLimits", err) } case "data_description": if err := dec.Decode(&s.DataDescription); err != nil { - return err + return fmt.Errorf("%s | %w", "DataDescription", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,17 +107,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "model_plot": if err := dec.Decode(&s.ModelPlot); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPlot", err) } case "model_snapshot_id": if err := dec.Decode(&s.ModelSnapshotId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotId", err) } case "model_snapshot_retention_days": @@ -127,7 +127,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotRetentionDays", err) } s.ModelSnapshotRetentionDays = &value case float64: @@ -137,7 +137,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "results_index_name": if err := dec.Decode(&s.ResultsIndexName); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsIndexName", err) } } diff --git a/typedapi/ml/validate/response.go b/typedapi/ml/validate/response.go index bbfe23fa8a..93dce01a7f 100644 --- a/typedapi/ml/validate/response.go +++ b/typedapi/ml/validate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
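The `description` handling above (also present in the updatejob request) reads the raw JSON token first and only then runs it through `strconv.Unquote`, so a malformed token is now reported against the field rather than as an anonymous decode failure. A sketch, assuming a hypothetical `descriptionDoc` type and a fallback to the raw text when unquoting fails (the exact fallback is elided in the hunk above):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// descriptionDoc is a hypothetical example type; only the description field
// is reproduced here.
type descriptionDoc struct {
	Description string `json:"description"`
}

func (s *descriptionDoc) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["description"]; ok {
		var tmp json.RawMessage
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return fmt.Errorf("%s | %w", "Description", err)
		}
		o := string(tmp)
		// Strip the surrounding JSON quotes; if the token was not a quoted
		// string, keep the raw text (assumed fallback, elided in the diff).
		if unquoted, err := strconv.Unquote(o); err == nil {
			o = unquoted
		}
		s.Description = o
	}
	return nil
}

func main() {
	var d descriptionDoc
	_ = json.Unmarshal([]byte(`{"description":"An updated description"}`), &d)
	fmt.Println(d.Description)
}
```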
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package validate // Response holds the response body struct for the package validate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/validate/MlValidateJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/validate/MlValidateJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/validate/validate.go b/typedapi/ml/validate/validate.go index b519283cf5..8b6181d59b 100644 --- a/typedapi/ml/validate/validate.go +++ b/typedapi/ml/validate/validate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Validates an anomaly detection job. package validate diff --git a/typedapi/ml/validatedetector/request.go b/typedapi/ml/validatedetector/request.go index 6b11870541..d46ca54690 100644 --- a/typedapi/ml/validatedetector/request.go +++ b/typedapi/ml/validatedetector/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package validatedetector @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package validatedetector // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/validate_detector/MlValidateDetectorRequest.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/validate_detector/MlValidateDetectorRequest.ts#L23-L31 type Request = types.Detector diff --git a/typedapi/ml/validatedetector/response.go b/typedapi/ml/validatedetector/response.go index 57f32a1280..916ce7d103 100644 --- a/typedapi/ml/validatedetector/response.go +++ b/typedapi/ml/validatedetector/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package validatedetector // Response holds the response body struct for the package validatedetector // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/validate_detector/MlValidateDetectorResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/validate_detector/MlValidateDetectorResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ml/validatedetector/validate_detector.go b/typedapi/ml/validatedetector/validate_detector.go index 992e9bddb8..39a8841780 100644 --- a/typedapi/ml/validatedetector/validate_detector.go +++ b/typedapi/ml/validatedetector/validate_detector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Validates an anomaly detection detector. package validatedetector diff --git a/typedapi/monitoring/bulk/bulk.go b/typedapi/monitoring/bulk/bulk.go index 4a1901a972..83e3f5c59d 100644 --- a/typedapi/monitoring/bulk/bulk.go +++ b/typedapi/monitoring/bulk/bulk.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Used by the monitoring features to send monitoring data. package bulk diff --git a/typedapi/monitoring/bulk/request.go b/typedapi/monitoring/bulk/request.go index 4215a489f7..012cb26381 100644 --- a/typedapi/monitoring/bulk/request.go +++ b/typedapi/monitoring/bulk/request.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package bulk // Request holds the request body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/monitoring/bulk/BulkMonitoringRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/monitoring/bulk/BulkMonitoringRequest.ts#L24-L59 type Request = []interface{} diff --git a/typedapi/monitoring/bulk/response.go b/typedapi/monitoring/bulk/response.go index 5cd4a0e70c..1fd5db270e 100644 --- a/typedapi/monitoring/bulk/response.go +++ b/typedapi/monitoring/bulk/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package bulk @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/monitoring/bulk/BulkMonitoringResponse.ts#L23-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/monitoring/bulk/BulkMonitoringResponse.ts#L23-L32 type Response struct { Error *types.ErrorCause `json:"error,omitempty"` // Errors True if there is was an error diff --git a/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go b/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go index cfb4e83b0f..91904e5b38 100644 --- a/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go +++ b/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes the archived repositories metering information present in the // cluster. diff --git a/typedapi/nodes/clearrepositoriesmeteringarchive/response.go b/typedapi/nodes/clearrepositoriesmeteringarchive/response.go index 7509168ef6..c994fb8a05 100644 --- a/typedapi/nodes/clearrepositoriesmeteringarchive/response.go +++ b/typedapi/nodes/clearrepositoriesmeteringarchive/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearrepositoriesmeteringarchive @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearrepositoriesmeteringarchive // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/clear_repositories_metering_archive/ClearRepositoriesMeteringArchiveResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/clear_repositories_metering_archive/ClearRepositoriesMeteringArchiveResponse.ts#L36-L38 type Response struct { // ClusterName Name of the cluster. Based on the [Cluster name diff --git a/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go b/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go index aa36110624..ca3629f6bb 100644 --- a/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go +++ b/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns cluster repositories metering information. package getrepositoriesmeteringinfo diff --git a/typedapi/nodes/getrepositoriesmeteringinfo/response.go b/typedapi/nodes/getrepositoriesmeteringinfo/response.go index 885d869234..d2e3db6d09 100644 --- a/typedapi/nodes/getrepositoriesmeteringinfo/response.go +++ b/typedapi/nodes/getrepositoriesmeteringinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrepositoriesmeteringinfo @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrepositoriesmeteringinfo // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/get_repositories_metering_info/GetRepositoriesMeteringInfoResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/get_repositories_metering_info/GetRepositoriesMeteringInfoResponse.ts#L36-L38 type Response struct { // ClusterName Name of the cluster. Based on the [Cluster name diff --git a/typedapi/nodes/hotthreads/hot_threads.go b/typedapi/nodes/hotthreads/hot_threads.go index 68b20e3126..cfde3d6e43 100644 --- a/typedapi/nodes/hotthreads/hot_threads.go +++ b/typedapi/nodes/hotthreads/hot_threads.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about hot threads on each node in the cluster. package hotthreads diff --git a/typedapi/nodes/hotthreads/response.go b/typedapi/nodes/hotthreads/response.go index 8f99ad9851..3b0161ae02 100644 --- a/typedapi/nodes/hotthreads/response.go +++ b/typedapi/nodes/hotthreads/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package hotthreads @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hotthreads // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/hot_threads/NodesHotThreadsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/hot_threads/NodesHotThreadsResponse.ts#L22-L24 type Response struct { HotThreads []types.HotThread `json:"hot_threads"` } diff --git a/typedapi/nodes/info/info.go b/typedapi/nodes/info/info.go index 714db47ead..25ad1150fa 100644 --- a/typedapi/nodes/info/info.go +++ b/typedapi/nodes/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about nodes in the cluster. package info diff --git a/typedapi/nodes/info/response.go b/typedapi/nodes/info/response.go index 3d3dc82e36..ec7a1034ed 100644 --- a/typedapi/nodes/info/response.go +++ b/typedapi/nodes/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/NodesInfoResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/NodesInfoResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go b/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go index 54c43b3604..8b01cecedd 100644 --- a/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go +++ b/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Reloads secure settings. package reloadsecuresettings diff --git a/typedapi/nodes/reloadsecuresettings/request.go b/typedapi/nodes/reloadsecuresettings/request.go index 620c5d8bfa..8108f44808 100644 --- a/typedapi/nodes/reloadsecuresettings/request.go +++ b/typedapi/nodes/reloadsecuresettings/request.go @@ -16,21 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reloadsecuresettings import ( - "bytes" "encoding/json" - "errors" "fmt" - "io" ) // Request holds the request body struct for the package reloadsecuresettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/reload_secure_settings/ReloadSecureSettingsRequest.ts#L24-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/reload_secure_settings/ReloadSecureSettingsRequest.ts#L24-L50 type Request struct { // SecureSettingsPassword The password for the Elasticsearch keystore. 
@@ -54,27 +51,3 @@ func (r *Request) FromJSON(data string) (*Request, error) { return &req, nil } - -func (s *Request) UnmarshalJSON(data []byte) error { - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "secure_settings_password": - if err := dec.Decode(&s.SecureSettingsPassword); err != nil { - return err - } - - } - } - return nil -} diff --git a/typedapi/nodes/reloadsecuresettings/response.go b/typedapi/nodes/reloadsecuresettings/response.go index 721000f9fa..a149bcea32 100644 --- a/typedapi/nodes/reloadsecuresettings/response.go +++ b/typedapi/nodes/reloadsecuresettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package reloadsecuresettings @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package reloadsecuresettings // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/reload_secure_settings/ReloadSecureSettingsResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/reload_secure_settings/ReloadSecureSettingsResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` // NodeStats Contains statistics about the number of nodes selected by the request’s node @@ -64,12 +65,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cluster_name": if err := dec.Decode(&s.ClusterName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClusterName", err) } case "_nodes": if err := dec.Decode(&s.NodeStats); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeStats", err) } case "nodes": @@ -77,7 +78,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Nodes = make(map[string]types.NodeReloadResult, 0) } if err := dec.Decode(&s.Nodes); err != nil { - return err + return fmt.Errorf("%s | %w", "Nodes", err) } } diff --git a/typedapi/nodes/stats/response.go b/typedapi/nodes/stats/response.go index c7e432b1d2..df6449042b 100644 --- a/typedapi/nodes/stats/response.go +++ b/typedapi/nodes/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
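With the hand-written `UnmarshalJSON` removed above, the reload_secure_settings request body falls back to `encoding/json`'s default struct-tag handling, which is enough for a single `secure_settings_password` property. A sketch with a hypothetical mirror of the generated `Request` (the concrete field type is not shown in the hunk and may differ in the client):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// reloadRequest mirrors the shape of the generated Request for illustration
// only; the real type in the client may use a dedicated password type.
type reloadRequest struct {
	SecureSettingsPassword *string `json:"secure_settings_password,omitempty"`
}

func main() {
	pw := "keystore-password"
	body, _ := json.Marshal(reloadRequest{SecureSettingsPassword: &pw})
	fmt.Println(string(body)) // {"secure_settings_password":"keystore-password"}

	var back reloadRequest
	_ = json.Unmarshal(body, &back)
	fmt.Println(*back.SecureSettingsPassword)
}
```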
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/stats/NodesStatsResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/stats/NodesStatsResponse.ts#L30-L32 type Response struct { ClusterName *string `json:"cluster_name,omitempty"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/stats/stats.go b/typedapi/nodes/stats/stats.go index ab67c02dda..1567a39e79 100644 --- a/typedapi/nodes/stats/stats.go +++ b/typedapi/nodes/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns statistical information about nodes in the cluster. package stats diff --git a/typedapi/nodes/usage/response.go b/typedapi/nodes/usage/response.go index 7f0478a419..4098a062f6 100644 --- a/typedapi/nodes/usage/response.go +++ b/typedapi/nodes/usage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package usage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package usage // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/usage/NodesUsageResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/usage/NodesUsageResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/usage/usage.go b/typedapi/nodes/usage/usage.go index 1b4d9a4614..24703a2388 100644 --- a/typedapi/nodes/usage/usage.go +++ b/typedapi/nodes/usage/usage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns low-level information about REST actions usage on nodes. package usage diff --git a/typedapi/queryruleset/delete/delete.go b/typedapi/queryruleset/delete/delete.go index e9cd1d4212..5b5d7e4a6c 100644 --- a/typedapi/queryruleset/delete/delete.go +++ b/typedapi/queryruleset/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a query ruleset. package delete diff --git a/typedapi/queryruleset/delete/response.go b/typedapi/queryruleset/delete/response.go index 8e2127f19b..28b5b47de8 100644 --- a/typedapi/queryruleset/delete/response.go +++ b/typedapi/queryruleset/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/delete/QueryRulesetDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/delete/QueryRulesetDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/queryruleset/get/get.go b/typedapi/queryruleset/get/get.go index 3247ddf455..13ab5be2f1 100644 --- a/typedapi/queryruleset/get/get.go +++ b/typedapi/queryruleset/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the details about a query ruleset. package get diff --git a/typedapi/queryruleset/get/response.go b/typedapi/queryruleset/get/response.go index 09907c2e68..aaf998f0f9 100644 --- a/typedapi/queryruleset/get/response.go +++ b/typedapi/queryruleset/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/get/QueryRulesetGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/get/QueryRulesetGetResponse.ts#L22-L24 type Response struct { // Rules Rules associated with the query ruleset diff --git a/typedapi/queryruleset/list/list.go b/typedapi/queryruleset/list/list.go index 9197f5c7aa..53c6102e2d 100644 --- a/typedapi/queryruleset/list/list.go +++ b/typedapi/queryruleset/list/list.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Lists query rulesets. 
package list diff --git a/typedapi/queryruleset/list/response.go b/typedapi/queryruleset/list/response.go index 84dbf09e50..e3004ac28a 100644 --- a/typedapi/queryruleset/list/response.go +++ b/typedapi/queryruleset/list/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package list @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package list // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/list/QueryRulesetListResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/list/QueryRulesetListResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Results []types.QueryRulesetListItem `json:"results"` diff --git a/typedapi/queryruleset/put/put.go b/typedapi/queryruleset/put/put.go index 978c4a4f76..131ae52c1c 100644 --- a/typedapi/queryruleset/put/put.go +++ b/typedapi/queryruleset/put/put.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a query ruleset. package put diff --git a/typedapi/queryruleset/put/request.go b/typedapi/queryruleset/put/request.go index 3831195bd4..bef2bdd6ab 100644 --- a/typedapi/queryruleset/put/request.go +++ b/typedapi/queryruleset/put/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package put @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/put/QueryRulesetPutRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/put/QueryRulesetPutRequest.ts#L23-L43 type Request struct { Rules []types.QueryRule `json:"rules"` } diff --git a/typedapi/queryruleset/put/response.go b/typedapi/queryruleset/put/response.go index 526f665ab3..ceec084492 100644 --- a/typedapi/queryruleset/put/response.go +++ b/typedapi/queryruleset/put/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package put @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/put/QueryRulesetPutResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/put/QueryRulesetPutResponse.ts#L22-L26 type Response struct { Result result.Result `json:"result"` } diff --git a/typedapi/rollup/deletejob/delete_job.go b/typedapi/rollup/deletejob/delete_job.go index c771d43db7..efd8a7857a 100644 --- a/typedapi/rollup/deletejob/delete_job.go +++ b/typedapi/rollup/deletejob/delete_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing rollup job. package deletejob diff --git a/typedapi/rollup/deletejob/response.go b/typedapi/rollup/deletejob/response.go index db4b425de0..cf7b0d5d07 100644 --- a/typedapi/rollup/deletejob/response.go +++ b/typedapi/rollup/deletejob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletejob @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deletejob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/delete_job/DeleteRollupJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/delete_job/DeleteRollupJobResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` TaskFailures []types.TaskFailure `json:"task_failures,omitempty"` diff --git a/typedapi/rollup/getjobs/get_jobs.go b/typedapi/rollup/getjobs/get_jobs.go index 19162dafc6..b43579e7b2 100644 --- a/typedapi/rollup/getjobs/get_jobs.go +++ b/typedapi/rollup/getjobs/get_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the configuration, stats, and status of rollup jobs. package getjobs diff --git a/typedapi/rollup/getjobs/response.go b/typedapi/rollup/getjobs/response.go index 07b18170fd..bb65ba13ec 100644 --- a/typedapi/rollup/getjobs/response.go +++ b/typedapi/rollup/getjobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getjobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobs // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_jobs/GetRollupJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_jobs/GetRollupJobResponse.ts#L22-L24 type Response struct { Jobs []types.RollupJob `json:"jobs"` } diff --git a/typedapi/rollup/getrollupcaps/get_rollup_caps.go b/typedapi/rollup/getrollupcaps/get_rollup_caps.go index 6f6d33c696..48714c0e00 100644 --- a/typedapi/rollup/getrollupcaps/get_rollup_caps.go +++ b/typedapi/rollup/getrollupcaps/get_rollup_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the capabilities of any rollup jobs that have been configured for a // specific index or index pattern. diff --git a/typedapi/rollup/getrollupcaps/response.go b/typedapi/rollup/getrollupcaps/response.go index 56ab175e6c..91757e053d 100644 --- a/typedapi/rollup/getrollupcaps/response.go +++ b/typedapi/rollup/getrollupcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrollupcaps @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrollupcaps // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_caps/GetRollupCapabilitiesResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_caps/GetRollupCapabilitiesResponse.ts#L24-L26 type Response map[string]types.RollupCapabilities diff --git a/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go b/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go index 422a53ae2b..9d3f97780a 100644 --- a/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go +++ b/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the rollup capabilities of all jobs inside of a rollup index (e.g. // the index where rollup data is stored). diff --git a/typedapi/rollup/getrollupindexcaps/response.go b/typedapi/rollup/getrollupindexcaps/response.go index 0551ecd788..deced4e412 100644 --- a/typedapi/rollup/getrollupindexcaps/response.go +++ b/typedapi/rollup/getrollupindexcaps/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrollupindexcaps @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrollupindexcaps // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_index_caps/GetRollupIndexCapabilitiesResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_index_caps/GetRollupIndexCapabilitiesResponse.ts#L24-L26 type Response map[string]types.IndexCapabilities diff --git a/typedapi/rollup/putjob/put_job.go b/typedapi/rollup/putjob/put_job.go index 1d9f23d60c..f1df187759 100644 --- a/typedapi/rollup/putjob/put_job.go +++ b/typedapi/rollup/putjob/put_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a rollup job. package putjob diff --git a/typedapi/rollup/putjob/request.go b/typedapi/rollup/putjob/request.go index 2769bff4ef..a1662e163d 100644 --- a/typedapi/rollup/putjob/request.go +++ b/typedapi/rollup/putjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putjob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/put_job/CreateRollupJobRequest.ts#L27-L89 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/put_job/CreateRollupJobRequest.ts#L27-L89 type Request struct { // Cron A cron string which defines the intervals when the rollup job should be @@ -119,7 +119,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "cron": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Cron", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,18 +130,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "groups": if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "headers": if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "index_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -152,7 +152,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } case "page_size": @@ -163,7 +163,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: 
value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PageSize", err) } s.PageSize = value case float64: @@ -173,12 +173,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "rollup_index": if err := dec.Decode(&s.RollupIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "RollupIndex", err) } case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/rollup/putjob/response.go b/typedapi/rollup/putjob/response.go index 39d0b1e6b5..87a306b4f8 100644 --- a/typedapi/rollup/putjob/response.go +++ b/typedapi/rollup/putjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putjob // Response holds the response body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/put_job/CreateRollupJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/put_job/CreateRollupJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/rollup/rollupsearch/request.go b/typedapi/rollup/rollupsearch/request.go index 4a28d6f61e..f2f4ada534 100644 --- a/typedapi/rollup/rollupsearch/request.go +++ b/typedapi/rollup/rollupsearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rollupsearch @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package rollupsearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/rollup_search/RollupSearchRequest.ts#L27-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/rollup_search/RollupSearchRequest.ts#L27-L57 type Request struct { // Aggregations Specifies aggregations. @@ -83,12 +83,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "size": @@ -99,7 +99,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/rollup/rollupsearch/response.go b/typedapi/rollup/rollupsearch/response.go index 4f222f665d..07f6aa56b8 100644 --- a/typedapi/rollup/rollupsearch/response.go +++ b/typedapi/rollup/rollupsearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
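The UnmarshalJSON hunks above (and the many that follow) replace bare "return err" statements with fmt.Errorf("%s | %w", "<FieldName>", err). Because %w wraps rather than replaces the original error, callers keep access to the underlying cause through errors.Is / errors.As, while the message now names the struct field that failed to decode. A minimal standalone sketch of that behaviour, not taken from the client; the field name and payload are invented for illustration:

    package main

    import (
        "encoding/json"
        "errors"
        "fmt"
    )

    func decodePageSize(raw json.RawMessage) (int, error) {
        var v int
        if err := json.Unmarshal(raw, &v); err != nil {
            // Same wrapping pattern as the generated code: prefix the field
            // name, keep the original error in the chain via %w.
            return 0, fmt.Errorf("%s | %w", "PageSize", err)
        }
        return v, nil
    }

    func main() {
        _, err := decodePageSize(json.RawMessage(`"not-a-number"`))
        fmt.Println(err) // PageSize | json: cannot unmarshal string into Go value of type int

        // The wrapped error is still matchable by type.
        var typeErr *json.UnmarshalTypeError
        fmt.Println(errors.As(err, &typeErr)) // true
    }

In short, the change adds field-level context to decode failures without breaking any existing error matching.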
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package rollupsearch @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -33,7 +34,7 @@ import ( // Response holds the response body struct for the package rollupsearch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/rollup_search/RollupSearchResponse.ts#L27-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/rollup_search/RollupSearchResponse.ts#L27-L36 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Hits types.HitsMetadata `json:"hits"` @@ -90,490 +91,490 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cardinality": o := types.NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := types.NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := types.NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := types.NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := types.NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := types.NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := types.NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := 
types.NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := types.NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := types.NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := types.NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := types.NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := types.NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := types.NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := types.NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := types.NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := types.NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := types.NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := types.NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := types.NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := types.NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := types.NewStringRareTermsAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := types.NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := types.NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := types.NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := types.NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := types.NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := types.NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := types.NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := types.NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := types.NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := types.NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := types.NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := types.NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := types.NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := types.NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := types.NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := types.NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := types.NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := 
types.NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := types.NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := types.NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := types.NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := types.NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := types.NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := types.NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := types.NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := types.NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := types.NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := types.NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := types.NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := types.NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := types.NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := types.NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -583,7 +584,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -592,12 +593,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "terminated_early": @@ -607,7 +608,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -621,7 +622,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -635,7 +636,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/rollup/rollupsearch/rollup_search.go b/typedapi/rollup/rollupsearch/rollup_search.go index 5064ce7e42..39699bfaaf 100644 --- a/typedapi/rollup/rollupsearch/rollup_search.go +++ b/typedapi/rollup/rollupsearch/rollup_search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Enables searching rolled-up data using the standard query DSL. package rollupsearch diff --git a/typedapi/rollup/startjob/response.go b/typedapi/rollup/startjob/response.go index 6de8790568..441fc1c5ca 100644 --- a/typedapi/rollup/startjob/response.go +++ b/typedapi/rollup/startjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package startjob // Response holds the response body struct for the package startjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/start_job/StartRollupJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/start_job/StartRollupJobResponse.ts#L20-L22 type Response struct { Started bool `json:"started"` } diff --git a/typedapi/rollup/startjob/start_job.go b/typedapi/rollup/startjob/start_job.go index 6d70be4c67..48f7a42f59 100644 --- a/typedapi/rollup/startjob/start_job.go +++ b/typedapi/rollup/startjob/start_job.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Starts an existing, stopped rollup job. package startjob diff --git a/typedapi/rollup/stopjob/response.go b/typedapi/rollup/stopjob/response.go index f045f5734d..2351670ec7 100644 --- a/typedapi/rollup/stopjob/response.go +++ b/typedapi/rollup/stopjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stopjob // Response holds the response body struct for the package stopjob // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/stop_job/StopRollupJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/stop_job/StopRollupJobResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/rollup/stopjob/stop_job.go b/typedapi/rollup/stopjob/stop_job.go index 1096c3b1c7..bbd004e7da 100644 --- a/typedapi/rollup/stopjob/stop_job.go +++ b/typedapi/rollup/stopjob/stop_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stops an existing, started rollup job. package stopjob diff --git a/typedapi/searchablesnapshots/cachestats/cache_stats.go b/typedapi/searchablesnapshots/cachestats/cache_stats.go index b98c3db2d8..c62c105f7b 100644 --- a/typedapi/searchablesnapshots/cachestats/cache_stats.go +++ b/typedapi/searchablesnapshots/cachestats/cache_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieve node-level cache statistics about searchable snapshots. package cachestats diff --git a/typedapi/searchablesnapshots/cachestats/response.go b/typedapi/searchablesnapshots/cachestats/response.go index f1b1da5940..869a0e9f37 100644 --- a/typedapi/searchablesnapshots/cachestats/response.go +++ b/typedapi/searchablesnapshots/cachestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package cachestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package cachestats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/cache_stats/Response.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/cache_stats/Response.ts#L24-L28 type Response struct { Nodes map[string]types.Node `json:"nodes"` } diff --git a/typedapi/searchablesnapshots/clearcache/clear_cache.go b/typedapi/searchablesnapshots/clearcache/clear_cache.go index 98021e87d0..a45d72daee 100644 --- a/typedapi/searchablesnapshots/clearcache/clear_cache.go +++ b/typedapi/searchablesnapshots/clearcache/clear_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clear the cache of searchable snapshots. package clearcache diff --git a/typedapi/searchablesnapshots/clearcache/response.go b/typedapi/searchablesnapshots/clearcache/response.go index 802937fb26..5f3f8798ef 100644 --- a/typedapi/searchablesnapshots/clearcache/response.go +++ b/typedapi/searchablesnapshots/clearcache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcache // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/clear_cache/SearchableSnapshotsClearCacheResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/clear_cache/SearchableSnapshotsClearCacheResponse.ts#L22-L24 type Response = json.RawMessage diff --git a/typedapi/searchablesnapshots/mount/mount.go b/typedapi/searchablesnapshots/mount/mount.go index 873a81193c..1b0b2d4251 100644 --- a/typedapi/searchablesnapshots/mount/mount.go +++ b/typedapi/searchablesnapshots/mount/mount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Mount a snapshot as a searchable index. package mount diff --git a/typedapi/searchablesnapshots/mount/request.go b/typedapi/searchablesnapshots/mount/request.go index f2abb8f37d..56e1622c34 100644 --- a/typedapi/searchablesnapshots/mount/request.go +++ b/typedapi/searchablesnapshots/mount/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
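The clear_cache response above is generated as a plain alias, type Response = json.RawMessage, so the body comes back undecoded and interpretation is left to the caller. A small hedged sketch of what that looks like in practice; the payload shown is invented and not prescribed by the API:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Response mirrors the generated alias: the raw body is kept as-is.
    type Response = json.RawMessage

    func main() {
        // Hypothetical body; callers decide how (and whether) to decode it.
        body := Response(`{"acknowledged": true}`)

        var out map[string]any
        if err := json.Unmarshal(body, &out); err != nil {
            panic(err)
        }
        fmt.Println(out["acknowledged"]) // true
    }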
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mount @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package mount // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/mount/SearchableSnapshotsMountRequest.ts#L26-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/mount/SearchableSnapshotsMountRequest.ts#L26-L49 type Request struct { IgnoreIndexSettings []string `json:"ignore_index_settings,omitempty"` Index string `json:"index"` @@ -74,12 +74,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "ignore_index_settings": if err := dec.Decode(&s.IgnoreIndexSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreIndexSettings", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "index_settings": @@ -87,12 +87,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.IndexSettings = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.IndexSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexSettings", err) } case "renamed_index": if err := dec.Decode(&s.RenamedIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "RenamedIndex", err) } } diff --git a/typedapi/searchablesnapshots/mount/response.go b/typedapi/searchablesnapshots/mount/response.go index 89be199c56..b5d33cd723 100644 --- a/typedapi/searchablesnapshots/mount/response.go +++ b/typedapi/searchablesnapshots/mount/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package mount @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mount // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/mount/SearchableSnapshotsMountResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/mount/SearchableSnapshotsMountResponse.ts#L22-L26 type Response struct { Snapshot types.MountedSnapshot `json:"snapshot"` } diff --git a/typedapi/searchablesnapshots/stats/response.go b/typedapi/searchablesnapshots/stats/response.go index a837afd193..d42766f27a 100644 --- a/typedapi/searchablesnapshots/stats/response.go +++ b/typedapi/searchablesnapshots/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/stats/SearchableSnapshotsStatsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/stats/SearchableSnapshotsStatsResponse.ts#L22-L27 type Response struct { Stats json.RawMessage `json:"stats,omitempty"` Total json.RawMessage `json:"total,omitempty"` diff --git a/typedapi/searchablesnapshots/stats/stats.go b/typedapi/searchablesnapshots/stats/stats.go index 84154ee23a..dcbe7680ff 100644 --- a/typedapi/searchablesnapshots/stats/stats.go +++ b/typedapi/searchablesnapshots/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieve shard-level statistics about searchable snapshots. package stats diff --git a/typedapi/searchapplication/delete/delete.go b/typedapi/searchapplication/delete/delete.go index 7a0c1a1e8e..85b39269fa 100644 --- a/typedapi/searchapplication/delete/delete.go +++ b/typedapi/searchapplication/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a search application. package delete diff --git a/typedapi/searchapplication/delete/response.go b/typedapi/searchapplication/delete/response.go index 35e1daf32e..fb9d6e851d 100644 --- a/typedapi/searchapplication/delete/response.go +++ b/typedapi/searchapplication/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/delete/SearchApplicationsDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/delete/SearchApplicationsDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go b/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go index ec29b51eda..ec8d5a0868 100644 --- a/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go +++ b/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Delete a behavioral analytics collection. package deletebehavioralanalytics diff --git a/typedapi/searchapplication/deletebehavioralanalytics/response.go b/typedapi/searchapplication/deletebehavioralanalytics/response.go index 17c03fcfe0..b46c84608b 100644 --- a/typedapi/searchapplication/deletebehavioralanalytics/response.go +++ b/typedapi/searchapplication/deletebehavioralanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletebehavioralanalytics // Response holds the response body struct for the package deletebehavioralanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/delete_behavioral_analytics/BehavioralAnalyticsDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/delete_behavioral_analytics/BehavioralAnalyticsDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/searchapplication/get/get.go b/typedapi/searchapplication/get/get.go index 4546a17914..f971114492 100644 --- a/typedapi/searchapplication/get/get.go +++ b/typedapi/searchapplication/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the details about a search application. package get diff --git a/typedapi/searchapplication/get/response.go b/typedapi/searchapplication/get/response.go index db9828d346..23decf41ec 100644 --- a/typedapi/searchapplication/get/response.go +++ b/typedapi/searchapplication/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/get/SearchApplicationsGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/get/SearchApplicationsGetResponse.ts#L22-L24 type Response struct { // AnalyticsCollectionName Analytics collection associated to the Search Application. diff --git a/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go b/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go index ebc61ae75c..2a5b819d2b 100644 --- a/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go +++ b/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the existing behavioral analytics collections. package getbehavioralanalytics diff --git a/typedapi/searchapplication/getbehavioralanalytics/response.go b/typedapi/searchapplication/getbehavioralanalytics/response.go index ba46da742c..e92f3d20be 100644 --- a/typedapi/searchapplication/getbehavioralanalytics/response.go +++ b/typedapi/searchapplication/getbehavioralanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getbehavioralanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getbehavioralanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/get_behavioral_analytics/BehavioralAnalyticsGetResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/get_behavioral_analytics/BehavioralAnalyticsGetResponse.ts#L24-L26 type Response map[string]types.AnalyticsCollection diff --git a/typedapi/searchapplication/list/list.go b/typedapi/searchapplication/list/list.go index 8427985841..803a3f64ed 100644 --- a/typedapi/searchapplication/list/list.go +++ b/typedapi/searchapplication/list/list.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the existing search applications. 
package list diff --git a/typedapi/searchapplication/list/response.go b/typedapi/searchapplication/list/response.go index 20475ef281..4d54ca8a90 100644 --- a/typedapi/searchapplication/list/response.go +++ b/typedapi/searchapplication/list/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package list @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package list // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/list/SearchApplicationsListResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/list/SearchApplicationsListResponse.ts#L24-L29 type Response struct { Count int64 `json:"count"` Results []types.SearchApplicationListItem `json:"results"` diff --git a/typedapi/searchapplication/put/put.go b/typedapi/searchapplication/put/put.go index 503244abe4..f64e6b0f15 100644 --- a/typedapi/searchapplication/put/put.go +++ b/typedapi/searchapplication/put/put.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a search application. package put diff --git a/typedapi/searchapplication/put/request.go b/typedapi/searchapplication/put/request.go index 0fe8a8efd3..847fc0f6b9 100644 --- a/typedapi/searchapplication/put/request.go +++ b/typedapi/searchapplication/put/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package put @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/put/SearchApplicationsPutRequest.ts#L23-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/put/SearchApplicationsPutRequest.ts#L23-L48 type Request = types.SearchApplication diff --git a/typedapi/searchapplication/put/response.go b/typedapi/searchapplication/put/response.go index 9018c62260..819c72e677 100644 --- a/typedapi/searchapplication/put/response.go +++ b/typedapi/searchapplication/put/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package put @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/put/SearchApplicationsPutResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/put/SearchApplicationsPutResponse.ts#L22-L26 type Response struct { Result result.Result `json:"result"` } diff --git a/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go b/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go index 3b08ad83e8..47048b776b 100644 --- a/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go +++ b/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a behavioral analytics collection. package putbehavioralanalytics diff --git a/typedapi/searchapplication/putbehavioralanalytics/response.go b/typedapi/searchapplication/putbehavioralanalytics/response.go index 3e65362710..9eeabc338c 100644 --- a/typedapi/searchapplication/putbehavioralanalytics/response.go +++ b/typedapi/searchapplication/putbehavioralanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putbehavioralanalytics // Response holds the response body struct for the package putbehavioralanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/put_behavioral_analytics/BehavioralAnalyticsPutResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/put_behavioral_analytics/BehavioralAnalyticsPutResponse.ts#L24-L26 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/searchapplication/search/request.go b/typedapi/searchapplication/search/request.go index 2f80fc04ca..5ee0863979 100644 --- a/typedapi/searchapplication/search/request.go +++ b/typedapi/searchapplication/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/search/SearchApplicationsSearchRequest.ts#L24-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/search/SearchApplicationsSearchRequest.ts#L24-L43 type Request struct { // Params Query parameters specific to this request, which will override any defaults diff --git a/typedapi/searchapplication/search/response.go b/typedapi/searchapplication/search/response.go index 12b1266d3d..5247e4a940 100644 --- a/typedapi/searchapplication/search/response.go +++ b/typedapi/searchapplication/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package search @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -33,7 +34,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/search/SearchApplicationsSearchResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/search/SearchApplicationsSearchResponse.ts#L23-L25 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -100,490 +101,490 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cardinality": o := types.NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := types.NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := types.NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := types.NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := types.NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := types.NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := types.NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := types.NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := types.NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := types.NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := types.NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := types.NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := types.NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := types.NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := types.NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := types.NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := 
types.NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := types.NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := types.NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := types.NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := types.NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := types.NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := types.NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := types.NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := types.NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := types.NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := types.NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := types.NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := types.NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := types.NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := types.NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := types.NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := types.NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := types.NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "geotile_grid": o := types.NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := types.NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := types.NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := types.NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := types.NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := types.NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := types.NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := types.NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := types.NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := types.NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := types.NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := types.NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := types.NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := types.NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := types.NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := types.NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := types.NewBoxPlotAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := types.NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := types.NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := types.NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := types.NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := types.NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -593,7 +594,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -602,7 +603,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -610,12 +611,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -625,7 +626,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := types.Float64(value) s.MaxScore = &f @@ -641,7 +642,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -651,22 +652,22 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -694,28 +695,28 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "completion": o := types.NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := types.NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := types.NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -725,7 +726,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -739,7 +740,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -753,7 +754,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -767,7 +768,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/searchapplication/search/search.go b/typedapi/searchapplication/search/search.go index e6a5d64614..d7f5249485 100644 --- a/typedapi/searchapplication/search/search.go +++ b/typedapi/searchapplication/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Perform a search against a search application package search diff --git a/typedapi/security/activateuserprofile/activate_user_profile.go b/typedapi/security/activateuserprofile/activate_user_profile.go index 7818b17ee9..a14ab05d7c 100644 --- a/typedapi/security/activateuserprofile/activate_user_profile.go +++ b/typedapi/security/activateuserprofile/activate_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates the user profile on behalf of another user. package activateuserprofile diff --git a/typedapi/security/activateuserprofile/request.go b/typedapi/security/activateuserprofile/request.go index 5073a18441..c217f67a09 100644 --- a/typedapi/security/activateuserprofile/request.go +++ b/typedapi/security/activateuserprofile/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
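The hunks above all make the same mechanical change to the generated UnmarshalJSON methods: a bare "return err" becomes return fmt.Errorf("%s | %w", "<FieldName>", err), which is also why "fmt" joins the import list. The sketch below illustrates the idea with a hypothetical sketchResponse type, not the generated one; because the wrap uses %w, the original decode error stays reachable through errors.Is and errors.As.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// sketchResponse stands in for a generated response type.
type sketchResponse struct {
	Took int64 `json:"took"`
}

func (s *sketchResponse) UnmarshalJSON(data []byte) error {
	type plain sketchResponse
	var p plain
	if err := json.Unmarshal(data, &p); err != nil {
		// Same "<Field> | <wrapped error>" shape as the regenerated decoders.
		return fmt.Errorf("%s | %w", "Took", err)
	}
	*s = sketchResponse(p)
	return nil
}

func main() {
	var r sketchResponse
	err := json.Unmarshal([]byte(`{"took":"oops"}`), &r)
	fmt.Println(err) // Took | json: cannot unmarshal string into Go struct field ...

	// The %w verb preserves the error chain, so callers can still match
	// the concrete json error type behind the field-name prefix.
	var typeErr *json.UnmarshalTypeError
	fmt.Println(errors.As(err, &typeErr)) // true
}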
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package activateuserprofile @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package activateuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/activate_user_profile/Request.ts#L23-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/activate_user_profile/Request.ts#L23-L37 type Request struct { AccessToken *string `json:"access_token,omitempty"` GrantType granttype.GrantType `json:"grant_type"` diff --git a/typedapi/security/activateuserprofile/response.go b/typedapi/security/activateuserprofile/response.go index 8d62131880..e9c32d79ea 100644 --- a/typedapi/security/activateuserprofile/response.go +++ b/typedapi/security/activateuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package activateuserprofile @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package activateuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/activate_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/activate_user_profile/Response.ts#L22-L24 type Response struct { Data map[string]json.RawMessage `json:"data"` Doc_ types.UserProfileHitMetadata `json:"_doc"` diff --git a/typedapi/security/authenticate/authenticate.go b/typedapi/security/authenticate/authenticate.go index 22ce97e9e6..a52258638f 100644 --- a/typedapi/security/authenticate/authenticate.go +++ b/typedapi/security/authenticate/authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Enables authentication as a user and retrieve information about the // authenticated user. diff --git a/typedapi/security/authenticate/response.go b/typedapi/security/authenticate/response.go index d6d10b2bc2..0acb159b66 100644 --- a/typedapi/security/authenticate/response.go +++ b/typedapi/security/authenticate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package authenticate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package authenticate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/authenticate/SecurityAuthenticateResponse.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/authenticate/SecurityAuthenticateResponse.ts#L25-L43 type Response struct { ApiKey *types.ApiKey `json:"api_key,omitempty"` AuthenticationRealm types.RealmInfo `json:"authentication_realm"` diff --git a/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go b/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go index f0da4a9aea..b09c63d434 100644 --- a/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go +++ b/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates the attributes of multiple existing API keys. package bulkupdateapikeys diff --git a/typedapi/security/changepassword/change_password.go b/typedapi/security/changepassword/change_password.go index c7d7f9a945..08c4331283 100644 --- a/typedapi/security/changepassword/change_password.go +++ b/typedapi/security/changepassword/change_password.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Changes the passwords of users in the native realm and built-in users. package changepassword diff --git a/typedapi/security/changepassword/request.go b/typedapi/security/changepassword/request.go index efb3728598..14de7f0add 100644 --- a/typedapi/security/changepassword/request.go +++ b/typedapi/security/changepassword/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package changepassword @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package changepassword // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/change_password/SecurityChangePasswordRequest.ts#L23-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/change_password/SecurityChangePasswordRequest.ts#L23-L51 type Request struct { // Password The new password value. Passwords must be at least 6 characters long. 
@@ -78,13 +78,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "password": if err := dec.Decode(&s.Password); err != nil { - return err + return fmt.Errorf("%s | %w", "Password", err) } case "password_hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PasswordHash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/security/changepassword/response.go b/typedapi/security/changepassword/response.go index bb2a0f9faa..21086c3dda 100644 --- a/typedapi/security/changepassword/response.go +++ b/typedapi/security/changepassword/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package changepassword // Response holds the response body struct for the package changepassword // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/change_password/SecurityChangePasswordResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/change_password/SecurityChangePasswordResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/clearapikeycache/clear_api_key_cache.go b/typedapi/security/clearapikeycache/clear_api_key_cache.go index ecfd515b94..c29ddba28e 100644 --- a/typedapi/security/clearapikeycache/clear_api_key_cache.go +++ b/typedapi/security/clearapikeycache/clear_api_key_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clear a subset or all entries from the API key cache. package clearapikeycache diff --git a/typedapi/security/clearapikeycache/response.go b/typedapi/security/clearapikeycache/response.go index e5163454f2..954e562a8e 100644 --- a/typedapi/security/clearapikeycache/response.go +++ b/typedapi/security/clearapikeycache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearapikeycache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearapikeycache // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/clear_api_key_cache/SecurityClearApiKeyCacheResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/clear_api_key_cache/SecurityClearApiKeyCacheResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedprivileges/clear_cached_privileges.go b/typedapi/security/clearcachedprivileges/clear_cached_privileges.go index cd1bd1d205..4a1c9eefba 100644 --- a/typedapi/security/clearcachedprivileges/clear_cached_privileges.go +++ b/typedapi/security/clearcachedprivileges/clear_cached_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Evicts application privileges from the native application privileges cache. package clearcachedprivileges diff --git a/typedapi/security/clearcachedprivileges/response.go b/typedapi/security/clearcachedprivileges/response.go index 8edc0f73c3..ed5c5c82b4 100644 --- a/typedapi/security/clearcachedprivileges/response.go +++ b/typedapi/security/clearcachedprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcachedprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/clear_cached_privileges/SecurityClearCachedPrivilegesResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/clear_cached_privileges/SecurityClearCachedPrivilegesResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedrealms/clear_cached_realms.go b/typedapi/security/clearcachedrealms/clear_cached_realms.go index f8dc67ca18..34d96c1c1a 100644 --- a/typedapi/security/clearcachedrealms/clear_cached_realms.go +++ b/typedapi/security/clearcachedrealms/clear_cached_realms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Evicts users from the user cache. Can completely clear the cache or evict // specific users. 
diff --git a/typedapi/security/clearcachedrealms/response.go b/typedapi/security/clearcachedrealms/response.go index a9cb30b8af..904b44fab5 100644 --- a/typedapi/security/clearcachedrealms/response.go +++ b/typedapi/security/clearcachedrealms/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcachedrealms @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedrealms // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/clear_cached_realms/SecurityClearCachedRealmsResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/clear_cached_realms/SecurityClearCachedRealmsResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedroles/clear_cached_roles.go b/typedapi/security/clearcachedroles/clear_cached_roles.go index 146c6407bc..95f26a1a64 100644 --- a/typedapi/security/clearcachedroles/clear_cached_roles.go +++ b/typedapi/security/clearcachedroles/clear_cached_roles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Evicts roles from the native role cache. package clearcachedroles diff --git a/typedapi/security/clearcachedroles/response.go b/typedapi/security/clearcachedroles/response.go index 1e0a5015d6..f34fdce216 100644 --- a/typedapi/security/clearcachedroles/response.go +++ b/typedapi/security/clearcachedroles/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcachedroles @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedroles // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/clear_cached_roles/ClearCachedRolesResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/clear_cached_roles/ClearCachedRolesResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go b/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go index 7628318eac..9fbbe40f24 100644 --- a/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go +++ b/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Evicts tokens from the service account token caches. package clearcachedservicetokens diff --git a/typedapi/security/clearcachedservicetokens/response.go b/typedapi/security/clearcachedservicetokens/response.go index b7125d4871..76561d4f8f 100644 --- a/typedapi/security/clearcachedservicetokens/response.go +++ b/typedapi/security/clearcachedservicetokens/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcachedservicetokens @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedservicetokens // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/clear_cached_service_tokens/ClearCachedServiceTokensResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/clear_cached_service_tokens/ClearCachedServiceTokensResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/createapikey/create_api_key.go b/typedapi/security/createapikey/create_api_key.go index 4f92de0a7b..fc3bb43bc5 100644 --- a/typedapi/security/createapikey/create_api_key.go +++ b/typedapi/security/createapikey/create_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates an API key for access without requiring basic authentication. package createapikey diff --git a/typedapi/security/createapikey/request.go b/typedapi/security/createapikey/request.go index 0d007c4968..40a2826157 100644 --- a/typedapi/security/createapikey/request.go +++ b/typedapi/security/createapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package createapikey @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package createapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/create_api_key/SecurityCreateApiKeyRequest.ts#L26-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/create_api_key/SecurityCreateApiKeyRequest.ts#L26-L58 type Request struct { // Expiration Expiration time for the API key. By default, API keys never expire. 
@@ -90,17 +90,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "expiration": if err := dec.Decode(&s.Expiration); err != nil { - return err + return fmt.Errorf("%s | %w", "Expiration", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "role_descriptors": @@ -108,7 +108,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.RoleDescriptors = make(map[string]types.RoleDescriptor, 0) } if err := dec.Decode(&s.RoleDescriptors); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleDescriptors", err) } } diff --git a/typedapi/security/createapikey/response.go b/typedapi/security/createapikey/response.go index c65c83cbb8..19557a3479 100644 --- a/typedapi/security/createapikey/response.go +++ b/typedapi/security/createapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package createapikey // Response holds the response body struct for the package createapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/create_api_key/SecurityCreateApiKeyResponse.ts#L23-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/create_api_key/SecurityCreateApiKeyResponse.ts#L23-L50 type Response struct { // ApiKey Generated API key. diff --git a/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go b/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go index e63cf55f5f..f94f4e41a0 100644 --- a/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go +++ b/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a cross-cluster API key for API key based remote cluster access. package createcrossclusterapikey diff --git a/typedapi/security/createservicetoken/create_service_token.go b/typedapi/security/createservicetoken/create_service_token.go index f641ccfe11..681dd46b94 100644 --- a/typedapi/security/createservicetoken/create_service_token.go +++ b/typedapi/security/createservicetoken/create_service_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a service account token for access without requiring basic // authentication. 
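The request-side decoders above apply the same wrapping and, for map-valued fields such as role_descriptors, allocate the map lazily before decoding into it. A small illustrative sketch of both patterns, using a hypothetical sketchRequest type rather than the generated create_api_key request:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
)

type sketchRequest struct {
	Name            string                     `json:"name"`
	RoleDescriptors map[string]json.RawMessage `json:"role_descriptors,omitempty"`
}

func (s *sketchRequest) UnmarshalJSON(data []byte) error {
	// Token-by-token decoding, mirroring the generated style.
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		switch t {
		case "name":
			if err := dec.Decode(&s.Name); err != nil {
				return fmt.Errorf("%s | %w", "Name", err)
			}
		case "role_descriptors":
			// Allocate the map before decoding into it, as the generated
			// code does for RoleDescriptors.
			if s.RoleDescriptors == nil {
				s.RoleDescriptors = make(map[string]json.RawMessage, 0)
			}
			if err := dec.Decode(&s.RoleDescriptors); err != nil {
				return fmt.Errorf("%s | %w", "RoleDescriptors", err)
			}
		}
	}
	return nil
}

func main() {
	var r sketchRequest
	err := json.Unmarshal([]byte(`{"name":"my-key","role_descriptors":{"ro":{}}}`), &r)
	fmt.Println(err, r.Name, len(r.RoleDescriptors)) // <nil> my-key 1
}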
diff --git a/typedapi/security/createservicetoken/response.go b/typedapi/security/createservicetoken/response.go index 3bce954589..aa37821edc 100644 --- a/typedapi/security/createservicetoken/response.go +++ b/typedapi/security/createservicetoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package createservicetoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package createservicetoken // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/create_service_token/CreateServiceTokenResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/create_service_token/CreateServiceTokenResponse.ts#L22-L27 type Response struct { Created bool `json:"created"` Token types.ServiceToken `json:"token"` diff --git a/typedapi/security/deleteprivileges/delete_privileges.go b/typedapi/security/deleteprivileges/delete_privileges.go index 9ac25587d7..aa2765f633 100644 --- a/typedapi/security/deleteprivileges/delete_privileges.go +++ b/typedapi/security/deleteprivileges/delete_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes application privileges. package deleteprivileges diff --git a/typedapi/security/deleteprivileges/response.go b/typedapi/security/deleteprivileges/response.go index dee008f814..63d0bed9ff 100644 --- a/typedapi/security/deleteprivileges/response.go +++ b/typedapi/security/deleteprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deleteprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/delete_privileges/SecurityDeletePrivilegesResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/delete_privileges/SecurityDeletePrivilegesResponse.ts#L23-L25 type Response map[string]map[string]types.FoundStatus diff --git a/typedapi/security/deleterole/delete_role.go b/typedapi/security/deleterole/delete_role.go index c794c62b19..d81f16a768 100644 --- a/typedapi/security/deleterole/delete_role.go +++ b/typedapi/security/deleterole/delete_role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes roles in the native realm. package deleterole diff --git a/typedapi/security/deleterole/response.go b/typedapi/security/deleterole/response.go index 46a3271158..4af31a00f6 100644 --- a/typedapi/security/deleterole/response.go +++ b/typedapi/security/deleterole/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleterole // Response holds the response body struct for the package deleterole // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/delete_role/SecurityDeleteRoleResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/delete_role/SecurityDeleteRoleResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/deleterolemapping/delete_role_mapping.go b/typedapi/security/deleterolemapping/delete_role_mapping.go index b5223cb92f..f5bc33839b 100644 --- a/typedapi/security/deleterolemapping/delete_role_mapping.go +++ b/typedapi/security/deleterolemapping/delete_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes role mappings. package deleterolemapping diff --git a/typedapi/security/deleterolemapping/response.go b/typedapi/security/deleterolemapping/response.go index 4f598f1614..1055a51211 100644 --- a/typedapi/security/deleterolemapping/response.go +++ b/typedapi/security/deleterolemapping/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleterolemapping // Response holds the response body struct for the package deleterolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/delete_role_mapping/SecurityDeleteRoleMappingResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/delete_role_mapping/SecurityDeleteRoleMappingResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/deleteservicetoken/delete_service_token.go b/typedapi/security/deleteservicetoken/delete_service_token.go index 4ebae186ed..14c684bff2 100644 --- a/typedapi/security/deleteservicetoken/delete_service_token.go +++ b/typedapi/security/deleteservicetoken/delete_service_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a service account token. package deleteservicetoken diff --git a/typedapi/security/deleteservicetoken/response.go b/typedapi/security/deleteservicetoken/response.go index e72024c110..388cd607da 100644 --- a/typedapi/security/deleteservicetoken/response.go +++ b/typedapi/security/deleteservicetoken/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteservicetoken // Response holds the response body struct for the package deleteservicetoken // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/delete_service_token/DeleteServiceTokenResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/delete_service_token/DeleteServiceTokenResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/deleteuser/delete_user.go b/typedapi/security/deleteuser/delete_user.go index 3d9d238f0d..a12846fe0a 100644 --- a/typedapi/security/deleteuser/delete_user.go +++ b/typedapi/security/deleteuser/delete_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes users from the native realm. package deleteuser diff --git a/typedapi/security/deleteuser/response.go b/typedapi/security/deleteuser/response.go index ceb2da6392..dfebc2bbe8 100644 --- a/typedapi/security/deleteuser/response.go +++ b/typedapi/security/deleteuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteuser // Response holds the response body struct for the package deleteuser // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/delete_user/SecurityDeleteUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/delete_user/SecurityDeleteUserResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/disableuser/disable_user.go b/typedapi/security/disableuser/disable_user.go index 7d8b334a14..8a25804dfd 100644 --- a/typedapi/security/disableuser/disable_user.go +++ b/typedapi/security/disableuser/disable_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Disables users in the native realm. package disableuser diff --git a/typedapi/security/disableuser/response.go b/typedapi/security/disableuser/response.go index 4ac6367379..9ae4065202 100644 --- a/typedapi/security/disableuser/response.go +++ b/typedapi/security/disableuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package disableuser // Response holds the response body struct for the package disableuser // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/disable_user/SecurityDisableUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/disable_user/SecurityDisableUserResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/disableuserprofile/disable_user_profile.go b/typedapi/security/disableuserprofile/disable_user_profile.go index 25599a5264..1fa43c9ef2 100644 --- a/typedapi/security/disableuserprofile/disable_user_profile.go +++ b/typedapi/security/disableuserprofile/disable_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Disables a user profile so it's not visible in user profile searches. package disableuserprofile diff --git a/typedapi/security/disableuserprofile/response.go b/typedapi/security/disableuserprofile/response.go index 80558c5284..49c7fe1873 100644 --- a/typedapi/security/disableuserprofile/response.go +++ b/typedapi/security/disableuserprofile/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package disableuserprofile // Response holds the response body struct for the package disableuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/disable_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/disable_user_profile/Response.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/security/enableuser/enable_user.go b/typedapi/security/enableuser/enable_user.go index b7ae1fa26e..2a0e8594d6 100644 --- a/typedapi/security/enableuser/enable_user.go +++ b/typedapi/security/enableuser/enable_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Enables users in the native realm. package enableuser diff --git a/typedapi/security/enableuser/response.go b/typedapi/security/enableuser/response.go index 809d8e80d2..e1cbe5745a 100644 --- a/typedapi/security/enableuser/response.go +++ b/typedapi/security/enableuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package enableuser // Response holds the response body struct for the package enableuser // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/enable_user/SecurityEnableUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/enable_user/SecurityEnableUserResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/enableuserprofile/enable_user_profile.go b/typedapi/security/enableuserprofile/enable_user_profile.go index 5b8f6436bb..0fc2ea0542 100644 --- a/typedapi/security/enableuserprofile/enable_user_profile.go +++ b/typedapi/security/enableuserprofile/enable_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Enables a user profile so it's visible in user profile searches. package enableuserprofile diff --git a/typedapi/security/enableuserprofile/response.go b/typedapi/security/enableuserprofile/response.go index 405266301b..16831dd1c6 100644 --- a/typedapi/security/enableuserprofile/response.go +++ b/typedapi/security/enableuserprofile/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package enableuserprofile // Response holds the response body struct for the package enableuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/enable_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/enable_user_profile/Response.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/security/enrollkibana/enroll_kibana.go b/typedapi/security/enrollkibana/enroll_kibana.go index 1f2bb5a1ab..71cedc248b 100644 --- a/typedapi/security/enrollkibana/enroll_kibana.go +++ b/typedapi/security/enrollkibana/enroll_kibana.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows a kibana instance to configure itself to communicate with a secured // elasticsearch cluster. diff --git a/typedapi/security/enrollkibana/response.go b/typedapi/security/enrollkibana/response.go index eed687f50b..3bc530ceb9 100644 --- a/typedapi/security/enrollkibana/response.go +++ b/typedapi/security/enrollkibana/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package enrollkibana @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package enrollkibana // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/enroll_kibana/Response.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/enroll_kibana/Response.ts#L20-L25 type Response struct { HttpCa string `json:"http_ca"` Token types.KibanaToken `json:"token"` diff --git a/typedapi/security/enrollnode/enroll_node.go b/typedapi/security/enrollnode/enroll_node.go index 61057f552e..ee00ccaeb3 100644 --- a/typedapi/security/enrollnode/enroll_node.go +++ b/typedapi/security/enrollnode/enroll_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Allows a new node to enroll to an existing cluster with security enabled. package enrollnode diff --git a/typedapi/security/enrollnode/response.go b/typedapi/security/enrollnode/response.go index ff0b5dcceb..1d2f3a3e65 100644 --- a/typedapi/security/enrollnode/response.go +++ b/typedapi/security/enrollnode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package enrollnode // Response holds the response body struct for the package enrollnode // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/enroll_node/Response.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/enroll_node/Response.ts#L20-L29 type Response struct { HttpCaCert string `json:"http_ca_cert"` HttpCaKey string `json:"http_ca_key"` diff --git a/typedapi/security/getapikey/get_api_key.go b/typedapi/security/getapikey/get_api_key.go index ae6a741175..63a30573f9 100644 --- a/typedapi/security/getapikey/get_api_key.go +++ b/typedapi/security/getapikey/get_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information for one or more API keys. package getapikey diff --git a/typedapi/security/getapikey/response.go b/typedapi/security/getapikey/response.go index 2a9781567b..23e52233e4 100644 --- a/typedapi/security/getapikey/response.go +++ b/typedapi/security/getapikey/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getapikey @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_api_key/SecurityGetApiKeyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_api_key/SecurityGetApiKeyResponse.ts#L22-L24 type Response struct { ApiKeys []types.ApiKey `json:"api_keys"` } diff --git a/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go b/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go index a6986cb180..dd49693220 100644 --- a/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go +++ b/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the list of cluster privileges and index privileges that are // available in this version of Elasticsearch. diff --git a/typedapi/security/getbuiltinprivileges/response.go b/typedapi/security/getbuiltinprivileges/response.go index 2a06d5c997..af05f230a9 100644 --- a/typedapi/security/getbuiltinprivileges/response.go +++ b/typedapi/security/getbuiltinprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getbuiltinprivileges @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Response holds the response body struct for the package getbuiltinprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_builtin_privileges/SecurityGetBuiltinPrivilegesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_builtin_privileges/SecurityGetBuiltinPrivilegesResponse.ts#L22-L24 type Response struct { Cluster []string `json:"cluster"` Index []string `json:"index"` @@ -57,7 +58,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "cluster": if err := dec.Decode(&s.Cluster); err != nil { - return err + return fmt.Errorf("%s | %w", "Cluster", err) } case "index": @@ -66,13 +67,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = append(s.Index, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } diff --git a/typedapi/security/getprivileges/get_privileges.go b/typedapi/security/getprivileges/get_privileges.go index 953fec23df..8ca436593c 100644 --- a/typedapi/security/getprivileges/get_privileges.go +++ b/typedapi/security/getprivileges/get_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves application privileges. package getprivileges diff --git a/typedapi/security/getprivileges/response.go b/typedapi/security/getprivileges/response.go index eb160f8c36..b7638cfce4 100644 --- a/typedapi/security/getprivileges/response.go +++ b/typedapi/security/getprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_privileges/SecurityGetPrivilegesResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_privileges/SecurityGetPrivilegesResponse.ts#L23-L25 type Response map[string]map[string]types.PrivilegesActions diff --git a/typedapi/security/getrole/get_role.go b/typedapi/security/getrole/get_role.go index e35610aa7b..93add6ed11 100644 --- a/typedapi/security/getrole/get_role.go +++ b/typedapi/security/getrole/get_role.go @@ -16,7 +16,7 @@ // under the License. 
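The regenerated UnmarshalJSON methods in the hunk above now wrap every field-level decode failure as fmt.Errorf("%s | %w", "FieldName", err) instead of returning the bare error. A minimal standalone sketch of why the %w verb matters follows; the Doc type and payload are illustrative stand-ins, not part of the generated client. The field name is prepended for context while errors.As can still reach the underlying *json.UnmarshalTypeError.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// Doc is a stand-in for a generated response struct.
type Doc struct {
	Cluster []string `json:"cluster"`
}

// UnmarshalJSON mirrors the generated pattern: decode each field separately
// and wrap failures with the field name plus %w so the cause stays unwrappable.
func (d *Doc) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["cluster"]; ok {
		if err := json.Unmarshal(msg, &d.Cluster); err != nil {
			return fmt.Errorf("%s | %w", "Cluster", err)
		}
	}
	return nil
}

func main() {
	var d Doc
	err := json.Unmarshal([]byte(`{"cluster": 42}`), &d) // wrong type on purpose
	fmt.Println(err)                                      // prints: Cluster | json: cannot unmarshal number ...

	var typeErr *json.UnmarshalTypeError
	fmt.Println(errors.As(err, &typeErr)) // true: %w keeps the cause reachable
}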
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves roles in the native realm. package getrole diff --git a/typedapi/security/getrole/response.go b/typedapi/security/getrole/response.go index b1724301fd..463fd638f0 100644 --- a/typedapi/security/getrole/response.go +++ b/typedapi/security/getrole/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrole @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrole // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_role/SecurityGetRoleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_role/SecurityGetRoleResponse.ts#L23-L25 type Response map[string]types.Role diff --git a/typedapi/security/getrolemapping/get_role_mapping.go b/typedapi/security/getrolemapping/get_role_mapping.go index eaa8ad7d42..9a7cf31d6a 100644 --- a/typedapi/security/getrolemapping/get_role_mapping.go +++ b/typedapi/security/getrolemapping/get_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves role mappings. package getrolemapping diff --git a/typedapi/security/getrolemapping/response.go b/typedapi/security/getrolemapping/response.go index a19d7fd5dc..97a663d473 100644 --- a/typedapi/security/getrolemapping/response.go +++ b/typedapi/security/getrolemapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrolemapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_role_mapping/SecurityGetRoleMappingResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_role_mapping/SecurityGetRoleMappingResponse.ts#L23-L25 type Response map[string]types.SecurityRoleMapping diff --git a/typedapi/security/getserviceaccounts/get_service_accounts.go b/typedapi/security/getserviceaccounts/get_service_accounts.go index 0aa8a8ec5b..9d327f288f 100644 --- a/typedapi/security/getserviceaccounts/get_service_accounts.go +++ b/typedapi/security/getserviceaccounts/get_service_accounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about service accounts. package getserviceaccounts diff --git a/typedapi/security/getserviceaccounts/response.go b/typedapi/security/getserviceaccounts/response.go index cd0f966bf4..e6e863ffe3 100644 --- a/typedapi/security/getserviceaccounts/response.go +++ b/typedapi/security/getserviceaccounts/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getserviceaccounts @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getserviceaccounts // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_service_accounts/GetServiceAccountsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_service_accounts/GetServiceAccountsResponse.ts#L23-L25 type Response map[string]types.RoleDescriptorWrapper diff --git a/typedapi/security/getservicecredentials/get_service_credentials.go b/typedapi/security/getservicecredentials/get_service_credentials.go index 41bd44e14c..c1c87d1682 100644 --- a/typedapi/security/getservicecredentials/get_service_credentials.go +++ b/typedapi/security/getservicecredentials/get_service_credentials.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information of all service credentials for a service account. package getservicecredentials diff --git a/typedapi/security/getservicecredentials/response.go b/typedapi/security/getservicecredentials/response.go index 42388c941e..7cec096a8b 100644 --- a/typedapi/security/getservicecredentials/response.go +++ b/typedapi/security/getservicecredentials/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getservicecredentials @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getservicecredentials // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_service_credentials/GetServiceCredentialsResponse.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_service_credentials/GetServiceCredentialsResponse.ts#L25-L33 type Response struct { Count int `json:"count"` // NodesCredentials Contains service account credentials collected from all nodes of the cluster diff --git a/typedapi/security/getsettings/get_settings.go b/typedapi/security/getsettings/get_settings.go index b83c66d59b..5360abb1a5 100644 --- a/typedapi/security/getsettings/get_settings.go +++ b/typedapi/security/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieve settings for the security system indices package getsettings diff --git a/typedapi/security/gettoken/get_token.go b/typedapi/security/gettoken/get_token.go index 17215b6043..30b26de73b 100644 --- a/typedapi/security/gettoken/get_token.go +++ b/typedapi/security/gettoken/get_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a bearer token for access without requiring basic authentication. package gettoken diff --git a/typedapi/security/gettoken/request.go b/typedapi/security/gettoken/request.go index 691d547270..2426d061ee 100644 --- a/typedapi/security/gettoken/request.go +++ b/typedapi/security/gettoken/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettoken @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package gettoken // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_token/GetUserAccessTokenRequest.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_token/GetUserAccessTokenRequest.ts#L25-L39 type Request struct { GrantType *accesstokengranttype.AccessTokenGrantType `json:"grant_type,omitempty"` KerberosTicket *string `json:"kerberos_ticket,omitempty"` @@ -77,13 +77,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "grant_type": if err := dec.Decode(&s.GrantType); err != nil { - return err + return fmt.Errorf("%s | %w", "GrantType", err) } case "kerberos_ticket": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KerberosTicket", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,13 +94,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "password": if err := dec.Decode(&s.Password); err != nil { - return err + return fmt.Errorf("%s | %w", "Password", err) } case "refresh_token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshToken", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,7 +112,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "scope": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Scope", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -123,7 +123,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/security/gettoken/response.go b/typedapi/security/gettoken/response.go index 7cb105e64d..ccd55dd66d 100644 --- a/typedapi/security/gettoken/response.go +++ b/typedapi/security/gettoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettoken // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_token/GetUserAccessTokenResponse.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_token/GetUserAccessTokenResponse.ts#L23-L33 type Response struct { AccessToken string `json:"access_token"` Authentication types.AuthenticatedUser `json:"authentication"` diff --git a/typedapi/security/getuser/get_user.go b/typedapi/security/getuser/get_user.go index 9e2e0bc9fa..0630d86afe 100644 --- a/typedapi/security/getuser/get_user.go +++ b/typedapi/security/getuser/get_user.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about users in the native realm and built-in users. package getuser diff --git a/typedapi/security/getuser/response.go b/typedapi/security/getuser/response.go index 1e0d32e6e6..53af56f905 100644 --- a/typedapi/security/getuser/response.go +++ b/typedapi/security/getuser/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getuser @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuser // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_user/SecurityGetUserResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_user/SecurityGetUserResponse.ts#L23-L25 type Response map[string]types.User diff --git a/typedapi/security/getuserprivileges/get_user_privileges.go b/typedapi/security/getuserprivileges/get_user_privileges.go index efe7b41644..dc6a066c94 100644 --- a/typedapi/security/getuserprivileges/get_user_privileges.go +++ b/typedapi/security/getuserprivileges/get_user_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves security privileges for the logged in user. package getuserprivileges diff --git a/typedapi/security/getuserprivileges/response.go b/typedapi/security/getuserprivileges/response.go index a2d6422dbf..2a16c83535 100644 --- a/typedapi/security/getuserprivileges/response.go +++ b/typedapi/security/getuserprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
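Several of the GET responses regenerated here are plain map types keyed by the entity name (for example, get_role's `Response map[string]types.Role` and get_user's `Response map[string]types.User`), so callers range over the map rather than reading a wrapper field. The sketch below shows that shape with a deliberately simplified local role type standing in for types.Role; the role name and fields are invented for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

// role is a simplified stand-in for types.Role; the real generated type
// has many more fields.
type role struct {
	Cluster []string `json:"cluster"`
	RunAs   []string `json:"run_as"`
}

func main() {
	// The get_role body is a JSON object keyed by role name, which is why the
	// generated Response is a map type rather than a struct with a field.
	body := []byte(`{"my_admin_role":{"cluster":["all"],"run_as":["other_user"]}}`)

	resp := map[string]role{}
	if err := json.Unmarshal(body, &resp); err != nil {
		panic(err)
	}
	for name, r := range resp {
		fmt.Printf("%s -> cluster=%v run_as=%v\n", name, r.Cluster, r.RunAs)
	}
}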
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getuserprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuserprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_user_privileges/SecurityGetUserPrivilegesResponse.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_user_privileges/SecurityGetUserPrivilegesResponse.ts#L27-L35 type Response struct { Applications []types.ApplicationPrivileges `json:"applications"` Cluster []string `json:"cluster"` diff --git a/typedapi/security/getuserprofile/get_user_profile.go b/typedapi/security/getuserprofile/get_user_profile.go index bdb8741fad..615512746d 100644 --- a/typedapi/security/getuserprofile/get_user_profile.go +++ b/typedapi/security/getuserprofile/get_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves user profiles for the given unique ID(s). package getuserprofile diff --git a/typedapi/security/getuserprofile/response.go b/typedapi/security/getuserprofile/response.go index 79e20f6a5c..2defd62e9c 100644 --- a/typedapi/security/getuserprofile/response.go +++ b/typedapi/security/getuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_user_profile/Response.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_user_profile/Response.ts#L23-L28 type Response struct { Errors *types.GetUserProfileErrors `json:"errors,omitempty"` Profiles []types.UserProfileWithMetadata `json:"profiles"` diff --git a/typedapi/security/grantapikey/grant_api_key.go b/typedapi/security/grantapikey/grant_api_key.go index d7a4fe807e..25d9b33e35 100644 --- a/typedapi/security/grantapikey/grant_api_key.go +++ b/typedapi/security/grantapikey/grant_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates an API key on behalf of another user. 
package grantapikey diff --git a/typedapi/security/grantapikey/request.go b/typedapi/security/grantapikey/request.go index 1ccb96e129..0b141ccdc2 100644 --- a/typedapi/security/grantapikey/request.go +++ b/typedapi/security/grantapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package grantapikey @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package grantapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/grant_api_key/SecurityGrantApiKeyRequest.ts#L24-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/grant_api_key/SecurityGrantApiKeyRequest.ts#L24-L75 type Request struct { // AccessToken The user’s access token. @@ -92,7 +92,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "access_token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AccessToken", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,27 +103,27 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "api_key": if err := dec.Decode(&s.ApiKey); err != nil { - return err + return fmt.Errorf("%s | %w", "ApiKey", err) } case "grant_type": if err := dec.Decode(&s.GrantType); err != nil { - return err + return fmt.Errorf("%s | %w", "GrantType", err) } case "password": if err := dec.Decode(&s.Password); err != nil { - return err + return fmt.Errorf("%s | %w", "Password", err) } case "run_as": if err := dec.Decode(&s.RunAs); err != nil { - return err + return fmt.Errorf("%s | %w", "RunAs", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/security/grantapikey/response.go b/typedapi/security/grantapikey/response.go index a9a4029538..9c5d5aad37 100644 --- a/typedapi/security/grantapikey/response.go +++ b/typedapi/security/grantapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package grantapikey // Response holds the response body struct for the package grantapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/grant_api_key/SecurityGrantApiKeyResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/grant_api_key/SecurityGrantApiKeyResponse.ts#L23-L31 type Response struct { ApiKey string `json:"api_key"` Encoded string `json:"encoded"` diff --git a/typedapi/security/hasprivileges/has_privileges.go b/typedapi/security/hasprivileges/has_privileges.go index d04d32b3f1..22e5337914 100644 --- a/typedapi/security/hasprivileges/has_privileges.go +++ b/typedapi/security/hasprivileges/has_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
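The request decoders above (get_token, grant_api_key) read free-form string fields through a json.RawMessage plus strconv.Unquote rather than decoding straight into a *string. The stdlib-only snippet below illustrates that quote-stripping step in isolation; the field name and token value are invented, and the fallback branch is only similar in spirit to the generated code.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	var raw map[string]json.RawMessage
	_ = json.Unmarshal([]byte(`{"access_token":"dGhpcyBpcyBub3QgYSByZWFsIHRva2Vu"}`), &raw)

	// The raw message still carries the surrounding JSON quotes.
	quoted := string(raw["access_token"])
	fmt.Println(quoted)

	// strconv.Unquote turns the JSON string literal into the plain value,
	// which is what ends up assigned to the struct field.
	value, err := strconv.Unquote(quoted)
	if err != nil {
		value = quoted // keep the raw text if it is not a quoted literal
	}
	fmt.Println(value)
}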
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Determines whether the specified user has a specified list of privileges. package hasprivileges diff --git a/typedapi/security/hasprivileges/request.go b/typedapi/security/hasprivileges/request.go index 63424b8a0a..8795f59fec 100644 --- a/typedapi/security/hasprivileges/request.go +++ b/typedapi/security/hasprivileges/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package hasprivileges @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package hasprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/SecurityHasPrivilegesRequest.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/SecurityHasPrivilegesRequest.ts#L25-L42 type Request struct { Application []types.ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. diff --git a/typedapi/security/hasprivileges/response.go b/typedapi/security/hasprivileges/response.go index c05add76b0..27498e6666 100644 --- a/typedapi/security/hasprivileges/response.go +++ b/typedapi/security/hasprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package hasprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hasprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/SecurityHasPrivilegesResponse.ts#L24-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/SecurityHasPrivilegesResponse.ts#L24-L32 type Response struct { Application types.ApplicationsPrivileges `json:"application"` Cluster map[string]bool `json:"cluster"` diff --git a/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go b/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go index 631b62b066..9dfcbd775a 100644 --- a/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go +++ b/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Determines whether the users associated with the specified profile IDs have // all the requested privileges. 
diff --git a/typedapi/security/hasprivilegesuserprofile/request.go b/typedapi/security/hasprivilegesuserprofile/request.go index b0056a2c1a..3536954bf4 100644 --- a/typedapi/security/hasprivilegesuserprofile/request.go +++ b/typedapi/security/hasprivilegesuserprofile/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package hasprivilegesuserprofile @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package hasprivilegesuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges_user_profile/Request.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges_user_profile/Request.ts#L24-L38 type Request struct { Privileges types.PrivilegesCheck `json:"privileges"` // Uids A list of profile IDs. The privileges are checked for associated users of the diff --git a/typedapi/security/hasprivilegesuserprofile/response.go b/typedapi/security/hasprivilegesuserprofile/response.go index a98a5d5e0e..57533f498a 100644 --- a/typedapi/security/hasprivilegesuserprofile/response.go +++ b/typedapi/security/hasprivilegesuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package hasprivilegesuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hasprivilegesuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges_user_profile/Response.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges_user_profile/Response.ts#L23-L38 type Response struct { // Errors The subset of the requested profile IDs for which an error diff --git a/typedapi/security/invalidateapikey/invalidate_api_key.go b/typedapi/security/invalidateapikey/invalidate_api_key.go index f5e303892b..4e964c80dd 100644 --- a/typedapi/security/invalidateapikey/invalidate_api_key.go +++ b/typedapi/security/invalidateapikey/invalidate_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Invalidates one or more API keys. package invalidateapikey diff --git a/typedapi/security/invalidateapikey/request.go b/typedapi/security/invalidateapikey/request.go index d1670cebe5..f4b1cc87c3 100644 --- a/typedapi/security/invalidateapikey/request.go +++ b/typedapi/security/invalidateapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package invalidateapikey @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package invalidateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/invalidate_api_key/SecurityInvalidateApiKeyRequest.ts#L23-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/invalidate_api_key/SecurityInvalidateApiKeyRequest.ts#L23-L66 type Request struct { Id *string `json:"id,omitempty"` // Ids A list of API key ids. @@ -90,17 +90,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "ids": if err := dec.Decode(&s.Ids); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "owner": @@ -110,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Owner", err) } s.Owner = &value case bool: @@ -120,7 +120,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "realm_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RealmName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +131,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/security/invalidateapikey/response.go b/typedapi/security/invalidateapikey/response.go index 155bc16ea4..ddde04853c 100644 --- a/typedapi/security/invalidateapikey/response.go +++ b/typedapi/security/invalidateapikey/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package invalidateapikey @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package invalidateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/invalidate_api_key/SecurityInvalidateApiKeyResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/invalidate_api_key/SecurityInvalidateApiKeyResponse.ts#L23-L30 type Response struct { ErrorCount int `json:"error_count"` ErrorDetails []types.ErrorCause `json:"error_details,omitempty"` diff --git a/typedapi/security/invalidatetoken/invalidate_token.go b/typedapi/security/invalidatetoken/invalidate_token.go index 71b1925499..164a58f37f 100644 --- a/typedapi/security/invalidatetoken/invalidate_token.go +++ b/typedapi/security/invalidatetoken/invalidate_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
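The invalidate_api_key request decoder above accepts `owner` either as a JSON boolean or as a string, falling back to strconv.ParseBool for the string form and now wrapping the parse error with the field name. A standalone sketch of that tolerant switch follows; parseOwner and the payloads are illustrative only.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// parseOwner accepts both `"owner": true` and `"owner": "true"`, the same
// tolerance the generated UnmarshalJSON shows for boolean fields.
func parseOwner(body []byte) (bool, error) {
	var raw struct {
		Owner interface{} `json:"owner"`
	}
	if err := json.Unmarshal(body, &raw); err != nil {
		return false, err
	}
	switch v := raw.Owner.(type) {
	case bool:
		return v, nil
	case string:
		b, err := strconv.ParseBool(v)
		if err != nil {
			return false, fmt.Errorf("%s | %w", "Owner", err)
		}
		return b, nil
	default:
		return false, fmt.Errorf("unexpected type %T for owner", v)
	}
}

func main() {
	for _, body := range []string{`{"owner": true}`, `{"owner": "false"}`} {
		b, err := parseOwner([]byte(body))
		fmt.Println(b, err)
	}
}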
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Invalidates one or more access tokens or refresh tokens. package invalidatetoken diff --git a/typedapi/security/invalidatetoken/request.go b/typedapi/security/invalidatetoken/request.go index d0dc57834b..4d29849b9a 100644 --- a/typedapi/security/invalidatetoken/request.go +++ b/typedapi/security/invalidatetoken/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package invalidatetoken @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package invalidatetoken // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/invalidate_token/SecurityInvalidateTokenRequest.ts#L23-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/invalidate_token/SecurityInvalidateTokenRequest.ts#L23-L35 type Request struct { RealmName *string `json:"realm_name,omitempty"` RefreshToken *string `json:"refresh_token,omitempty"` @@ -73,13 +73,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "realm_name": if err := dec.Decode(&s.RealmName); err != nil { - return err + return fmt.Errorf("%s | %w", "RealmName", err) } case "refresh_token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshToken", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,7 +91,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Token", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -102,7 +102,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/security/invalidatetoken/response.go b/typedapi/security/invalidatetoken/response.go index 6cba68f53d..e94b04d36e 100644 --- a/typedapi/security/invalidatetoken/response.go +++ b/typedapi/security/invalidatetoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package invalidatetoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package invalidatetoken // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/invalidate_token/SecurityInvalidateTokenResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/invalidate_token/SecurityInvalidateTokenResponse.ts#L23-L30 type Response struct { ErrorCount int64 `json:"error_count"` ErrorDetails []types.ErrorCause `json:"error_details,omitempty"` diff --git a/typedapi/security/oidcauthenticate/oidc_authenticate.go b/typedapi/security/oidcauthenticate/oidc_authenticate.go index 4a40f210a4..e59d819231 100644 --- a/typedapi/security/oidcauthenticate/oidc_authenticate.go +++ b/typedapi/security/oidcauthenticate/oidc_authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Exchanges an OpenID Connection authentication response message for an // Elasticsearch access token and refresh token pair diff --git a/typedapi/security/oidclogout/oidc_logout.go b/typedapi/security/oidclogout/oidc_logout.go index 29ac83a7c4..de26669c4a 100644 --- a/typedapi/security/oidclogout/oidc_logout.go +++ b/typedapi/security/oidclogout/oidc_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Invalidates a refresh token and access token that was generated from the // OpenID Connect Authenticate API diff --git a/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go b/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go index 9a20149232..b8a7b903ed 100644 --- a/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go +++ b/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates an OAuth 2.0 authentication request as a URL string package oidcprepareauthentication diff --git a/typedapi/security/putprivileges/put_privileges.go b/typedapi/security/putprivileges/put_privileges.go index d873d55c39..fccace2873 100644 --- a/typedapi/security/putprivileges/put_privileges.go +++ b/typedapi/security/putprivileges/put_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds or updates application privileges. package putprivileges diff --git a/typedapi/security/putprivileges/request.go b/typedapi/security/putprivileges/request.go index 0d3f556ccf..c8c866fe6d 100644 --- a/typedapi/security/putprivileges/request.go +++ b/typedapi/security/putprivileges/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putprivileges @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package putprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_privileges/SecurityPutPrivilegesRequest.ts#L25-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_privileges/SecurityPutPrivilegesRequest.ts#L25-L37 type Request = map[string]map[string]types.PrivilegesActions diff --git a/typedapi/security/putprivileges/response.go b/typedapi/security/putprivileges/response.go index bd60d64666..429ee5f542 100644 --- a/typedapi/security/putprivileges/response.go +++ b/typedapi/security/putprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_privileges/SecurityPutPrivilegesResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_privileges/SecurityPutPrivilegesResponse.ts#L23-L25 type Response map[string]map[string]types.CreatedStatus diff --git a/typedapi/security/putrole/put_role.go b/typedapi/security/putrole/put_role.go index 753c6c9674..85840ce0da 100644 --- a/typedapi/security/putrole/put_role.go +++ b/typedapi/security/putrole/put_role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds and updates roles in the native realm. package putrole @@ -386,7 +386,7 @@ func (r *PutRole) RunAs(runas ...string) *PutRole { // `false`, the role is ignored, but is still listed in the response from the // authenticate API. 
// API name: transient_metadata -func (r *PutRole) TransientMetadata(transientmetadata *types.TransientMetadataConfig) *PutRole { +func (r *PutRole) TransientMetadata(transientmetadata map[string]json.RawMessage) *PutRole { r.req.TransientMetadata = transientmetadata diff --git a/typedapi/security/putrole/request.go b/typedapi/security/putrole/request.go index 8c6a4df08b..20408c7589 100644 --- a/typedapi/security/putrole/request.go +++ b/typedapi/security/putrole/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putrole @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putrole // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_role/SecurityPutRoleRequest.ts#L31-L80 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_role/SecurityPutRoleRequest.ts#L30-L79 type Request struct { // Applications A list of application privilege entries. @@ -58,13 +58,14 @@ type Request struct { // updated dynamically to list the incompatible features. If `enabled` is // `false`, the role is ignored, but is still listed in the response from the // authenticate API. - TransientMetadata *types.TransientMetadataConfig `json:"transient_metadata,omitempty"` + TransientMetadata map[string]json.RawMessage `json:"transient_metadata,omitempty"` } // NewRequest returns a Request func NewRequest() *Request { r := &Request{ - Global: make(map[string]json.RawMessage, 0), + Global: make(map[string]json.RawMessage, 0), + TransientMetadata: make(map[string]json.RawMessage, 0), } return r } @@ -97,12 +98,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "applications": if err := dec.Decode(&s.Applications); err != nil { - return err + return fmt.Errorf("%s | %w", "Applications", err) } case "cluster": if err := dec.Decode(&s.Cluster); err != nil { - return err + return fmt.Errorf("%s | %w", "Cluster", err) } case "global": @@ -110,27 +111,30 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Global = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Global); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "run_as": if err := dec.Decode(&s.RunAs); err != nil { - return err + return fmt.Errorf("%s | %w", "RunAs", err) } case "transient_metadata": + if s.TransientMetadata == nil { + s.TransientMetadata = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.TransientMetadata); err != nil { - return err + return fmt.Errorf("%s | %w", "TransientMetadata", err) } } diff --git a/typedapi/security/putrole/response.go b/typedapi/security/putrole/response.go index 81424bcf75..fed6f415ba 100644 --- a/typedapi/security/putrole/response.go +++ b/typedapi/security/putrole/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
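The `put_role` request no longer models `transient_metadata` with a dedicated `*types.TransientMetadataConfig`: both the request field and the fluent `TransientMetadata` setter now take a free-form `map[string]json.RawMessage`, so callers pass raw JSON values directly. A minimal sketch of the new shape (the metadata key and value are made up, and the printed body is only what this is expected to produce):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/putrole"
)

func main() {
	req := putrole.NewRequest()

	// transient_metadata is now arbitrary JSON instead of a typed struct.
	req.TransientMetadata = map[string]json.RawMessage{
		"enabled": json.RawMessage(`true`),
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // expected: {"transient_metadata":{"enabled":true}}
}
```

The same map can be handed to the `TransientMetadata(...)` builder method whose new signature appears above.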
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putrole @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putrole // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_role/SecurityPutRoleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_role/SecurityPutRoleResponse.ts#L22-L24 type Response struct { Role types.CreatedStatus `json:"role"` } diff --git a/typedapi/security/putrolemapping/put_role_mapping.go b/typedapi/security/putrolemapping/put_role_mapping.go index e605ba45b0..90f97112f8 100644 --- a/typedapi/security/putrolemapping/put_role_mapping.go +++ b/typedapi/security/putrolemapping/put_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates and updates role mappings. package putrolemapping diff --git a/typedapi/security/putrolemapping/request.go b/typedapi/security/putrolemapping/request.go index bb15e601fa..61bba10458 100644 --- a/typedapi/security/putrolemapping/request.go +++ b/typedapi/security/putrolemapping/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putrolemapping @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_role_mapping/SecurityPutRoleMappingRequest.ts#L25-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_role_mapping/SecurityPutRoleMappingRequest.ts#L25-L45 type Request struct { Enabled *bool `json:"enabled,omitempty"` Metadata types.Metadata `json:"metadata,omitempty"` @@ -82,7 +82,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -91,27 +91,27 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "role_templates": if err := dec.Decode(&s.RoleTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleTemplates", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "rules": if err := dec.Decode(&s.Rules); err != nil { - return err + return fmt.Errorf("%s | %w", "Rules", err) } case "run_as": if err := dec.Decode(&s.RunAs); err != nil { - return err + return fmt.Errorf("%s | %w", "RunAs", err) } } diff --git a/typedapi/security/putrolemapping/response.go 
b/typedapi/security/putrolemapping/response.go index e92e4b6c5c..f7a71a46a9 100644 --- a/typedapi/security/putrolemapping/response.go +++ b/typedapi/security/putrolemapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putrolemapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_role_mapping/SecurityPutRoleMappingResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_role_mapping/SecurityPutRoleMappingResponse.ts#L22-L24 type Response struct { Created *bool `json:"created,omitempty"` RoleMapping types.CreatedStatus `json:"role_mapping"` diff --git a/typedapi/security/putuser/put_user.go b/typedapi/security/putuser/put_user.go index 23e5e0ac7a..5cf5c371d3 100644 --- a/typedapi/security/putuser/put_user.go +++ b/typedapi/security/putuser/put_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds and updates users in the native realm. These users are commonly referred // to as native users. diff --git a/typedapi/security/putuser/request.go b/typedapi/security/putuser/request.go index bd8e4d3873..e251e9dbfd 100644 --- a/typedapi/security/putuser/request.go +++ b/typedapi/security/putuser/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putuser @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putuser // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_user/SecurityPutUserRequest.ts#L23-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_user/SecurityPutUserRequest.ts#L23-L44 type Request struct { Email string `json:"email,omitempty"` Enabled *bool `json:"enabled,omitempty"` @@ -80,7 +80,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "email": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Email", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -106,7 +106,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "full_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FullName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,18 +117,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "password": if err := dec.Decode(&s.Password); err != nil { - return err + return fmt.Errorf("%s | %w", "Password", err) } case "password_hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PasswordHash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -139,12 +139,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/security/putuser/response.go b/typedapi/security/putuser/response.go index 950a0d4237..f162bcc9ce 100644 --- a/typedapi/security/putuser/response.go +++ b/typedapi/security/putuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
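Beyond the specification hash bump, the regenerated `UnmarshalJSON` methods now wrap every per-field decode failure as `fmt.Errorf("%s | %w", "<Field>", err)`, so the error message names the offending field while `%w` keeps the underlying cause reachable through `errors.Is`/`errors.As`. A small sketch against `putuser.Request`, using a deliberately invalid payload (the payload and the printed message are illustrative):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/putuser"
)

func main() {
	var req putuser.Request

	// "enabled" must be a boolean, so this fails inside the generated decoder.
	err := json.Unmarshal([]byte(`{"enabled":"not-a-bool"}`), &req)

	// Something like: Enabled | strconv.ParseBool: parsing "not-a-bool": invalid syntax
	fmt.Println(err)

	// The original error is still in the chain thanks to %w.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true
}
```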
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putuser // Response holds the response body struct for the package putuser // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_user/SecurityPutUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_user/SecurityPutUserResponse.ts#L20-L22 type Response struct { Created bool `json:"created"` } diff --git a/typedapi/security/queryapikeys/query_api_keys.go b/typedapi/security/queryapikeys/query_api_keys.go index 157b38527b..76719704c3 100644 --- a/typedapi/security/queryapikeys/query_api_keys.go +++ b/typedapi/security/queryapikeys/query_api_keys.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information for API keys using a subset of query DSL package queryapikeys @@ -305,6 +305,22 @@ func (r *QueryApiKeys) WithLimitedBy(withlimitedby bool) *QueryApiKeys { return r } +// Aggregations Any aggregations to run over the corpus of returned API keys. +// Aggregations and queries work together. Aggregations are computed only on the +// API keys that match the query. +// This supports only a subset of aggregation types, namely: `terms`, `range`, +// `date_range`, `missing`, +// `cardinality`, `value_count`, `composite`, `filter`, and `filters`. +// Additionally, aggregations only run over the same subset of fields that query +// works with. +// API name: aggregations +func (r *QueryApiKeys) Aggregations(aggregations map[string]types.APIKeyAggregationContainer) *QueryApiKeys { + + r.req.Aggregations = aggregations + + return r +} + // From Starting document offset. // By default, you cannot page through more than 10,000 hits using the from and // size parameters. @@ -317,11 +333,16 @@ func (r *QueryApiKeys) From(from int) *QueryApiKeys { } // Query A query to filter which API keys to return. +// If the query parameter is missing, it is equivalent to a `match_all` query. // The query supports a subset of query types, including `match_all`, `bool`, -// `term`, `terms`, `ids`, `prefix`, `wildcard`, and `range`. -// You can query all public information associated with an API key. +// `term`, `terms`, `match`, +// `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. +// You can query the following public information associated with an API key: +// `id`, `type`, `name`, +// `creation`, `expiration`, `invalidated`, `invalidation`, `username`, `realm`, +// and `metadata`. // API name: query -func (r *QueryApiKeys) Query(query *types.Query) *QueryApiKeys { +func (r *QueryApiKeys) Query(query *types.APIKeyQueryContainer) *QueryApiKeys { r.req.Query = query diff --git a/typedapi/security/queryapikeys/request.go b/typedapi/security/queryapikeys/request.go index 37748dd55c..b8e1ca8163 100644 --- a/typedapi/security/queryapikeys/request.go +++ b/typedapi/security/queryapikeys/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package queryapikeys @@ -33,19 +33,33 @@ import ( // Request holds the request body struct for the package queryapikeys // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/query_api_keys/QueryApiKeysRequest.ts#L25-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/query_api_keys/QueryApiKeysRequest.ts#L26-L86 type Request struct { + // Aggregations Any aggregations to run over the corpus of returned API keys. + // Aggregations and queries work together. Aggregations are computed only on the + // API keys that match the query. + // This supports only a subset of aggregation types, namely: `terms`, `range`, + // `date_range`, `missing`, + // `cardinality`, `value_count`, `composite`, `filter`, and `filters`. + // Additionally, aggregations only run over the same subset of fields that query + // works with. + Aggregations map[string]types.APIKeyAggregationContainer `json:"aggregations,omitempty"` // From Starting document offset. // By default, you cannot page through more than 10,000 hits using the from and // size parameters. // To page through more hits, use the `search_after` parameter. From *int `json:"from,omitempty"` // Query A query to filter which API keys to return. + // If the query parameter is missing, it is equivalent to a `match_all` query. // The query supports a subset of query types, including `match_all`, `bool`, - // `term`, `terms`, `ids`, `prefix`, `wildcard`, and `range`. - // You can query all public information associated with an API key. - Query *types.Query `json:"query,omitempty"` + // `term`, `terms`, `match`, + // `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. + // You can query the following public information associated with an API key: + // `id`, `type`, `name`, + // `creation`, `expiration`, `invalidated`, `invalidation`, `username`, `realm`, + // and `metadata`. + Query *types.APIKeyQueryContainer `json:"query,omitempty"` // SearchAfter Search after definition SearchAfter []types.FieldValue `json:"search_after,omitempty"` // Size The number of hits to return. 
@@ -61,7 +75,9 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { - r := &Request{} + r := &Request{ + Aggregations: make(map[string]types.APIKeyAggregationContainer, 0), + } return r } @@ -91,6 +107,14 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { + case "aggregations", "aggs": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.APIKeyAggregationContainer, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + case "from": var tmp interface{} @@ -99,7 +123,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -109,12 +133,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "search_after": if err := dec.Decode(&s.SearchAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } case "size": @@ -125,7 +149,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -139,13 +163,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } diff --git a/typedapi/security/queryapikeys/response.go b/typedapi/security/queryapikeys/response.go index 4946b9ad4b..c8cd36e4ef 100644 --- a/typedapi/security/queryapikeys/response.go +++ b/typedapi/security/queryapikeys/response.go @@ -16,19 +16,29 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package queryapikeys import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + "strings" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Response holds the response body struct for the package queryapikeys // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/query_api_keys/QueryApiKeysResponse.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/query_api_keys/QueryApiKeysResponse.ts#L26-L45 type Response struct { + // Aggregations The aggregations result, if requested. + Aggregations map[string]types.APIKeyAggregate `json:"aggregations,omitempty"` // ApiKeys A list of API key information. ApiKeys []types.ApiKey `json:"api_keys"` // Count The number of API keys returned in the response. 
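`security.query_api_keys` gains aggregation support: the request struct adds `Aggregations map[string]types.APIKeyAggregationContainer`, and `Query` switches from `*types.Query` to the restricted `*types.APIKeyQueryContainer`. The sketch below builds a request body with both; the outer types and `NewRequest` come from this change, while the inner container field names (`Term`, `Terms`, `Field`, `Size`) are assumptions based on the regular query and aggregation containers:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/queryapikeys"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	req := queryapikeys.NewRequest()

	// Only return keys owned by this user (field names inside the container are
	// assumed to mirror types.Query).
	req.Query = &types.APIKeyQueryContainer{
		Term: map[string]types.TermQuery{
			"username": {Value: "elastic"},
		},
	}

	// Bucket the matching keys by username; terms is one of the supported
	// aggregation types listed in the doc comment above.
	field := "username"
	size := 10
	req.Aggregations = map[string]types.APIKeyAggregationContainer{
		"keys_by_username": {
			Terms: &types.TermsAggregation{Field: &field, Size: &size},
		},
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
```

The same values can equally be passed through the new `Aggregations` and `Query` builder methods shown earlier in this hunk.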
@@ -39,6 +49,197 @@ type Response struct { // NewResponse returns a Response func NewResponse() *Response { - r := &Response{} + r := &Response{ + Aggregations: make(map[string]types.APIKeyAggregate, 0), + } return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.APIKeyAggregate, 0) + } + + for dec.More() { + tt, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + if value, ok := tt.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.APIKeyAggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := types.NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := types.NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := types.NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := types.NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := types.NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := types.NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := types.NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "missing": + o := types.NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "filter": + o := types.NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "filters": + o := types.NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "range": + o := types.NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := types.NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + case "composite": + o := types.NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[elems[1]] = o + } + } else { + 
return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + s.Aggregations[value] = o + } + } + } + + case "api_keys": + if err := dec.Decode(&s.ApiKeys); err != nil { + return fmt.Errorf("%s | %w", "ApiKeys", err) + } + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "Count", err) + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "Total", err) + } + s.Total = value + case float64: + f := int(v) + s.Total = f + } + + } + } + return nil +} diff --git a/typedapi/security/samlauthenticate/request.go b/typedapi/security/samlauthenticate/request.go index 4c9ce7d9e3..f2beee5d25 100644 --- a/typedapi/security/samlauthenticate/request.go +++ b/typedapi/security/samlauthenticate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlauthenticate @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package samlauthenticate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_authenticate/Request.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_authenticate/Request.ts#L23-L38 type Request struct { // Content The SAML response as it was sent by the user’s browser, usually a Base64 @@ -80,7 +80,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "content": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Content", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,20 +95,20 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } s.Ids = append(s.Ids, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Ids); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } } case "realm": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Realm", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/security/samlauthenticate/response.go b/typedapi/security/samlauthenticate/response.go index a6115c314b..52165b7ae7 100644 --- a/typedapi/security/samlauthenticate/response.go +++ b/typedapi/security/samlauthenticate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
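On the response side, the generated decoder now resolves each aggregation from Elasticsearch's typed-keys form (`sterms#<name>`, `filter#<name>`, ...) into a concrete aggregate such as `*types.StringTermsAggregate`, keyed by the bare aggregation name. A self-contained sketch that feeds the new `UnmarshalJSON` a made-up body; the bucket field names (`Key`, `DocCount`) are assumed to follow the usual generated layout:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/queryapikeys"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Trimmed, invented response body in typed_keys form.
	body := []byte(`{
		"total": 2,
		"count": 2,
		"api_keys": [],
		"aggregations": {
			"sterms#keys_by_username": {
				"doc_count_error_upper_bound": 0,
				"sum_other_doc_count": 0,
				"buckets": [{"key": "elastic", "doc_count": 2}]
			}
		}
	}`)

	res := queryapikeys.NewResponse()
	if err := json.Unmarshal(body, res); err != nil {
		panic(err)
	}

	for name, agg := range res.Aggregations {
		// Each value is one of the concrete aggregate types from the switch above.
		if terms, ok := agg.(*types.StringTermsAggregate); ok {
			// Buckets is a union (array or keyed object); the array form is typical.
			if buckets, ok := terms.Buckets.([]types.StringTermsBucket); ok {
				for _, b := range buckets {
					fmt.Printf("%s: %v owns %d API key(s)\n", name, b.Key, b.DocCount)
				}
			}
		}
	}
}
```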
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlauthenticate // Response holds the response body struct for the package samlauthenticate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_authenticate/Response.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_authenticate/Response.ts#L22-L30 type Response struct { AccessToken string `json:"access_token"` ExpiresIn int `json:"expires_in"` diff --git a/typedapi/security/samlauthenticate/saml_authenticate.go b/typedapi/security/samlauthenticate/saml_authenticate.go index a018863626..1d235eb349 100644 --- a/typedapi/security/samlauthenticate/saml_authenticate.go +++ b/typedapi/security/samlauthenticate/saml_authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Exchanges a SAML Response message for an Elasticsearch access token and // refresh token pair diff --git a/typedapi/security/samlcompletelogout/request.go b/typedapi/security/samlcompletelogout/request.go index 5cc704afe9..f81e0b9de3 100644 --- a/typedapi/security/samlcompletelogout/request.go +++ b/typedapi/security/samlcompletelogout/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlcompletelogout @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package samlcompletelogout // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_complete_logout/Request.ts#L23-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_complete_logout/Request.ts#L23-L40 type Request struct { // Content If the SAML IdP sends the logout response with the HTTP-Post binding, this @@ -84,7 +84,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "content": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Content", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,20 +99,20 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } s.Ids = append(s.Ids, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Ids); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } } case "query_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +124,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "realm": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Realm", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/security/samlcompletelogout/saml_complete_logout.go b/typedapi/security/samlcompletelogout/saml_complete_logout.go index 8bc3ee1925..cf2e218b26 100644 --- a/typedapi/security/samlcompletelogout/saml_complete_logout.go +++ b/typedapi/security/samlcompletelogout/saml_complete_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Verifies the logout response sent from the SAML IdP package samlcompletelogout diff --git a/typedapi/security/samlinvalidate/request.go b/typedapi/security/samlinvalidate/request.go index 8fcc9ad6b9..8b018dd0f5 100644 --- a/typedapi/security/samlinvalidate/request.go +++ b/typedapi/security/samlinvalidate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlinvalidate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlinvalidate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_invalidate/Request.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_invalidate/Request.ts#L22-L43 type Request struct { // Acs The Assertion Consumer Service URL that matches the one of the SAML realm in diff --git a/typedapi/security/samlinvalidate/response.go b/typedapi/security/samlinvalidate/response.go index 90dcd50144..d74aa82957 100644 --- a/typedapi/security/samlinvalidate/response.go +++ b/typedapi/security/samlinvalidate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlinvalidate // Response holds the response body struct for the package samlinvalidate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_invalidate/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_invalidate/Response.ts#L22-L28 type Response struct { Invalidated int `json:"invalidated"` Realm string `json:"realm"` diff --git a/typedapi/security/samlinvalidate/saml_invalidate.go b/typedapi/security/samlinvalidate/saml_invalidate.go index 5ae2d7d319..14a83556b9 100644 --- a/typedapi/security/samlinvalidate/saml_invalidate.go +++ b/typedapi/security/samlinvalidate/saml_invalidate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Consumes a SAML LogoutRequest package samlinvalidate diff --git a/typedapi/security/samllogout/request.go b/typedapi/security/samllogout/request.go index fb5729da1c..e985d9e9be 100644 --- a/typedapi/security/samllogout/request.go +++ b/typedapi/security/samllogout/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samllogout @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samllogout // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_logout/Request.ts#L22-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_logout/Request.ts#L22-L41 type Request struct { // RefreshToken The refresh token that was returned as a response to calling the SAML diff --git a/typedapi/security/samllogout/response.go b/typedapi/security/samllogout/response.go index 16bc67f35b..faec684fd3 100644 --- a/typedapi/security/samllogout/response.go +++ b/typedapi/security/samllogout/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samllogout // Response holds the response body struct for the package samllogout // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_logout/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_logout/Response.ts#L20-L24 type Response struct { Redirect string `json:"redirect"` } diff --git a/typedapi/security/samllogout/saml_logout.go b/typedapi/security/samllogout/saml_logout.go index 02be4c0934..9836e79554 100644 --- a/typedapi/security/samllogout/saml_logout.go +++ b/typedapi/security/samllogout/saml_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Invalidates an access token and a refresh token that were generated via the // SAML Authenticate API diff --git a/typedapi/security/samlprepareauthentication/request.go b/typedapi/security/samlprepareauthentication/request.go index 57041babfb..61b639f092 100644 --- a/typedapi/security/samlprepareauthentication/request.go +++ b/typedapi/security/samlprepareauthentication/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlprepareauthentication @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlprepareauthentication // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_prepare_authentication/Request.ts#L22-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_prepare_authentication/Request.ts#L22-L46 type Request struct { // Acs The Assertion Consumer Service URL that matches the one of the SAML realms in diff --git a/typedapi/security/samlprepareauthentication/response.go b/typedapi/security/samlprepareauthentication/response.go index 613be2ba28..c1a7a2973a 100644 --- a/typedapi/security/samlprepareauthentication/response.go +++ b/typedapi/security/samlprepareauthentication/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlprepareauthentication // Response holds the response body struct for the package samlprepareauthentication // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_prepare_authentication/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_prepare_authentication/Response.ts#L22-L28 type Response struct { Id string `json:"id"` Realm string `json:"realm"` diff --git a/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go b/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go index 23d27ae99d..50436a94d9 100644 --- a/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go +++ b/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a SAML authentication request package samlprepareauthentication diff --git a/typedapi/security/samlserviceprovidermetadata/response.go b/typedapi/security/samlserviceprovidermetadata/response.go index ad2a187993..f9190b4283 100644 --- a/typedapi/security/samlserviceprovidermetadata/response.go +++ b/typedapi/security/samlserviceprovidermetadata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package samlserviceprovidermetadata // Response holds the response body struct for the package samlserviceprovidermetadata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/saml_service_provider_metadata/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/saml_service_provider_metadata/Response.ts#L20-L24 type Response struct { Metadata string `json:"metadata"` } diff --git a/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go b/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go index 5a7b440adf..b980f2842a 100644 --- a/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go +++ b/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider package samlserviceprovidermetadata diff --git a/typedapi/security/suggestuserprofiles/request.go b/typedapi/security/suggestuserprofiles/request.go index c9e05276d3..615ad5bc59 100644 --- a/typedapi/security/suggestuserprofiles/request.go +++ b/typedapi/security/suggestuserprofiles/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package suggestuserprofiles @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package suggestuserprofiles // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/suggest_user_profiles/Request.ts#L24-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/suggest_user_profiles/Request.ts#L24-L66 type Request struct { // Data List of filters for the `data` field of the profile document. 
@@ -91,25 +91,25 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } s.Data = append(s.Data, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Data); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } } case "hint": if err := dec.Decode(&s.Hint); err != nil { - return err + return fmt.Errorf("%s | %w", "Hint", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -125,7 +125,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/security/suggestuserprofiles/response.go b/typedapi/security/suggestuserprofiles/response.go index 133153659b..22120c5af5 100644 --- a/typedapi/security/suggestuserprofiles/response.go +++ b/typedapi/security/suggestuserprofiles/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package suggestuserprofiles @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package suggestuserprofiles // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/suggest_user_profiles/Response.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/suggest_user_profiles/Response.ts#L29-L35 type Response struct { Profiles []types.UserProfile `json:"profiles"` Took int64 `json:"took"` diff --git a/typedapi/security/suggestuserprofiles/suggest_user_profiles.go b/typedapi/security/suggestuserprofiles/suggest_user_profiles.go index a00925fe32..38c42336ba 100644 --- a/typedapi/security/suggestuserprofiles/suggest_user_profiles.go +++ b/typedapi/security/suggestuserprofiles/suggest_user_profiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Get suggestions for user profiles that match specified search criteria. package suggestuserprofiles diff --git a/typedapi/security/updateapikey/request.go b/typedapi/security/updateapikey/request.go index 3dfe5232c1..9eb5649e66 100644 --- a/typedapi/security/updateapikey/request.go +++ b/typedapi/security/updateapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updateapikey @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package updateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/update_api_key/Request.ts#L26-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/update_api_key/Request.ts#L26-L65 type Request struct { // Expiration Expiration time for the API key. @@ -88,12 +88,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "expiration": if err := dec.Decode(&s.Expiration); err != nil { - return err + return fmt.Errorf("%s | %w", "Expiration", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "role_descriptors": @@ -101,7 +101,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.RoleDescriptors = make(map[string]types.RoleDescriptor, 0) } if err := dec.Decode(&s.RoleDescriptors); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleDescriptors", err) } } diff --git a/typedapi/security/updateapikey/response.go b/typedapi/security/updateapikey/response.go index c42a4bea55..e963282ef0 100644 --- a/typedapi/security/updateapikey/response.go +++ b/typedapi/security/updateapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updateapikey // Response holds the response body struct for the package updateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/update_api_key/Response.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/update_api_key/Response.ts#L20-L28 type Response struct { // Updated If `true`, the API key was updated. diff --git a/typedapi/security/updateapikey/update_api_key.go b/typedapi/security/updateapikey/update_api_key.go index 2fb9bbc9ff..17cb543cb1 100644 --- a/typedapi/security/updateapikey/update_api_key.go +++ b/typedapi/security/updateapikey/update_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates attributes of an existing API key. package updateapikey diff --git a/typedapi/security/updatesettings/update_settings.go b/typedapi/security/updatesettings/update_settings.go index 1bfb600821..9a53f53577 100644 --- a/typedapi/security/updatesettings/update_settings.go +++ b/typedapi/security/updatesettings/update_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Update settings for the security system index package updatesettings diff --git a/typedapi/security/updateuserprofiledata/request.go b/typedapi/security/updateuserprofiledata/request.go index 000ff2e943..f5a9e22ef3 100644 --- a/typedapi/security/updateuserprofiledata/request.go +++ b/typedapi/security/updateuserprofiledata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updateuserprofiledata @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updateuserprofiledata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/update_user_profile_data/Request.ts#L27-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/update_user_profile_data/Request.ts#L27-L70 type Request struct { // Data Non-searchable data that you want to associate with the user profile. diff --git a/typedapi/security/updateuserprofiledata/response.go b/typedapi/security/updateuserprofiledata/response.go index 570f98fd84..ebc22766b1 100644 --- a/typedapi/security/updateuserprofiledata/response.go +++ b/typedapi/security/updateuserprofiledata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updateuserprofiledata // Response holds the response body struct for the package updateuserprofiledata // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/update_user_profile_data/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/update_user_profile_data/Response.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/security/updateuserprofiledata/update_user_profile_data.go b/typedapi/security/updateuserprofiledata/update_user_profile_data.go index 9a7a4ac4d5..3ff2bb5161 100644 --- a/typedapi/security/updateuserprofiledata/update_user_profile_data.go +++ b/typedapi/security/updateuserprofiledata/update_user_profile_data.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Update application specific data for the user profile of the given unique ID. 
package updateuserprofiledata diff --git a/typedapi/shutdown/deletenode/delete_node.go b/typedapi/shutdown/deletenode/delete_node.go index 34e665fb02..e56873f725 100644 --- a/typedapi/shutdown/deletenode/delete_node.go +++ b/typedapi/shutdown/deletenode/delete_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes a node from the shutdown list. Designed for indirect use by ECE/ESS // and ECK. Direct use is not supported. diff --git a/typedapi/shutdown/deletenode/response.go b/typedapi/shutdown/deletenode/response.go index 5520307043..1829aa3735 100644 --- a/typedapi/shutdown/deletenode/response.go +++ b/typedapi/shutdown/deletenode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletenode // Response holds the response body struct for the package deletenode // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/delete_node/ShutdownDeleteNodeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/delete_node/ShutdownDeleteNodeResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/shutdown/getnode/get_node.go b/typedapi/shutdown/getnode/get_node.go index 75e91530f8..2ed5ed9598 100644 --- a/typedapi/shutdown/getnode/get_node.go +++ b/typedapi/shutdown/getnode/get_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieve status of a node or nodes that are currently marked as shutting // down. Designed for indirect use by ECE/ESS and ECK. Direct use is not diff --git a/typedapi/shutdown/getnode/response.go b/typedapi/shutdown/getnode/response.go index 60e82b6967..9ab302767c 100644 --- a/typedapi/shutdown/getnode/response.go +++ b/typedapi/shutdown/getnode/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getnode @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getnode // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L23-L27 type Response struct { Nodes []types.NodeShutdownStatus `json:"nodes"` } diff --git a/typedapi/shutdown/putnode/put_node.go b/typedapi/shutdown/putnode/put_node.go index 7d8054cb81..a878cd9976 100644 --- a/typedapi/shutdown/putnode/put_node.go +++ b/typedapi/shutdown/putnode/put_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. // Direct use is not supported. diff --git a/typedapi/shutdown/putnode/request.go b/typedapi/shutdown/putnode/request.go index 2d8b53fab4..ac122502f2 100644 --- a/typedapi/shutdown/putnode/request.go +++ b/typedapi/shutdown/putnode/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putnode @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putnode // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/put_node/ShutdownPutNodeRequest.ts#L25-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/put_node/ShutdownPutNodeRequest.ts#L25-L76 type Request struct { // AllocationDelay Only valid if type is restart. diff --git a/typedapi/shutdown/putnode/response.go b/typedapi/shutdown/putnode/response.go index 740a49e773..ced8b56aa5 100644 --- a/typedapi/shutdown/putnode/response.go +++ b/typedapi/shutdown/putnode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putnode // Response holds the response body struct for the package putnode // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/put_node/ShutdownPutNodeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/put_node/ShutdownPutNodeResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/slm/deletelifecycle/delete_lifecycle.go b/typedapi/slm/deletelifecycle/delete_lifecycle.go index ccaa981c05..002e6eb4ea 100644 --- a/typedapi/slm/deletelifecycle/delete_lifecycle.go +++ b/typedapi/slm/deletelifecycle/delete_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing snapshot lifecycle policy. package deletelifecycle diff --git a/typedapi/slm/deletelifecycle/response.go b/typedapi/slm/deletelifecycle/response.go index e25ff4191b..b108602bd8 100644 --- a/typedapi/slm/deletelifecycle/response.go +++ b/typedapi/slm/deletelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletelifecycle // Response holds the response body struct for the package deletelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/delete_lifecycle/DeleteSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/delete_lifecycle/DeleteSnapshotLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/executelifecycle/execute_lifecycle.go b/typedapi/slm/executelifecycle/execute_lifecycle.go index 7dce54cb66..8a68971356 100644 --- a/typedapi/slm/executelifecycle/execute_lifecycle.go +++ b/typedapi/slm/executelifecycle/execute_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Immediately creates a snapshot according to the lifecycle policy, without // waiting for the scheduled time. diff --git a/typedapi/slm/executelifecycle/response.go b/typedapi/slm/executelifecycle/response.go index e502f4f1e0..46df781ec4 100644 --- a/typedapi/slm/executelifecycle/response.go +++ b/typedapi/slm/executelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package executelifecycle // Response holds the response body struct for the package executelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/execute_lifecycle/ExecuteSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/execute_lifecycle/ExecuteSnapshotLifecycleResponse.ts#L22-L24 type Response struct { SnapshotName string `json:"snapshot_name"` } diff --git a/typedapi/slm/executeretention/execute_retention.go b/typedapi/slm/executeretention/execute_retention.go index cae931ad69..f907795e7e 100644 --- a/typedapi/slm/executeretention/execute_retention.go +++ b/typedapi/slm/executeretention/execute_retention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes any snapshots that are expired according to the policy's retention // rules. diff --git a/typedapi/slm/executeretention/response.go b/typedapi/slm/executeretention/response.go index 8be792b870..4a4f09e7d1 100644 --- a/typedapi/slm/executeretention/response.go +++ b/typedapi/slm/executeretention/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package executeretention // Response holds the response body struct for the package executeretention // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/execute_retention/ExecuteRetentionResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/execute_retention/ExecuteRetentionResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/getlifecycle/get_lifecycle.go b/typedapi/slm/getlifecycle/get_lifecycle.go index a54a038932..2c42339856 100644 --- a/typedapi/slm/getlifecycle/get_lifecycle.go +++ b/typedapi/slm/getlifecycle/get_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves one or more snapshot lifecycle policy definitions and information // about the latest snapshot attempts. diff --git a/typedapi/slm/getlifecycle/response.go b/typedapi/slm/getlifecycle/response.go index 206501ed22..c1a10133c5 100644 --- a/typedapi/slm/getlifecycle/response.go +++ b/typedapi/slm/getlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getlifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/get_lifecycle/GetSnapshotLifecycleResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/get_lifecycle/GetSnapshotLifecycleResponse.ts#L24-L26 type Response map[string]types.SnapshotLifecycle diff --git a/typedapi/slm/getstats/get_stats.go b/typedapi/slm/getstats/get_stats.go index 77922c3b45..d174e5f0d6 100644 --- a/typedapi/slm/getstats/get_stats.go +++ b/typedapi/slm/getstats/get_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns global and policy-level statistics about actions taken by snapshot // lifecycle management. diff --git a/typedapi/slm/getstats/response.go b/typedapi/slm/getstats/response.go index 78141e1109..9262e78322 100644 --- a/typedapi/slm/getstats/response.go +++ b/typedapi/slm/getstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/get_stats/GetSnapshotLifecycleStatsResponse.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/get_stats/GetSnapshotLifecycleStatsResponse.ts#L23-L36 type Response struct { PolicyStats []string `json:"policy_stats"` RetentionDeletionTime types.Duration `json:"retention_deletion_time"` diff --git a/typedapi/slm/getstatus/get_status.go b/typedapi/slm/getstatus/get_status.go index b9bffb4c8e..47d48987e2 100644 --- a/typedapi/slm/getstatus/get_status.go +++ b/typedapi/slm/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the status of snapshot lifecycle management (SLM). package getstatus diff --git a/typedapi/slm/getstatus/response.go b/typedapi/slm/getstatus/response.go index 099fc76be0..563524b047 100644 --- a/typedapi/slm/getstatus/response.go +++ b/typedapi/slm/getstatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getstatus @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/get_status/GetSnapshotLifecycleManagementStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/get_status/GetSnapshotLifecycleManagementStatusResponse.ts#L22-L24 type Response struct { OperationMode lifecycleoperationmode.LifecycleOperationMode `json:"operation_mode"` } diff --git a/typedapi/slm/putlifecycle/put_lifecycle.go b/typedapi/slm/putlifecycle/put_lifecycle.go index ea81777ae1..399869b255 100644 --- a/typedapi/slm/putlifecycle/put_lifecycle.go +++ b/typedapi/slm/putlifecycle/put_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a snapshot lifecycle policy. package putlifecycle diff --git a/typedapi/slm/putlifecycle/request.go b/typedapi/slm/putlifecycle/request.go index 5d2a99874b..9f452fbf9d 100644 --- a/typedapi/slm/putlifecycle/request.go +++ b/typedapi/slm/putlifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putlifecycle @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/put_lifecycle/PutSnapshotLifecycleRequest.ts#L26-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/put_lifecycle/PutSnapshotLifecycleRequest.ts#L26-L72 type Request struct { // Config Configuration for each snapshot created by the policy. 
@@ -87,18 +87,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "config": if err := dec.Decode(&s.Config); err != nil { - return err + return fmt.Errorf("%s | %w", "Config", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "repository": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,12 +109,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "retention": if err := dec.Decode(&s.Retention); err != nil { - return err + return fmt.Errorf("%s | %w", "Retention", err) } case "schedule": if err := dec.Decode(&s.Schedule); err != nil { - return err + return fmt.Errorf("%s | %w", "Schedule", err) } } diff --git a/typedapi/slm/putlifecycle/response.go b/typedapi/slm/putlifecycle/response.go index ffdaf94262..89163c2ba9 100644 --- a/typedapi/slm/putlifecycle/response.go +++ b/typedapi/slm/putlifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putlifecycle // Response holds the response body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/put_lifecycle/PutSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/put_lifecycle/PutSnapshotLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/start/response.go b/typedapi/slm/start/response.go index da5b3d2e1a..709fdc8466 100644 --- a/typedapi/slm/start/response.go +++ b/typedapi/slm/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/start/StartSnapshotLifecycleManagementResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/start/StartSnapshotLifecycleManagementResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/start/start.go b/typedapi/slm/start/start.go index d3bf909807..b4c90b78e6 100644 --- a/typedapi/slm/start/start.go +++ b/typedapi/slm/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Turns on snapshot lifecycle management (SLM). 
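Editorial note: the putlifecycle hunks above change the generated UnmarshalJSON so every per-field decode failure is returned as fmt.Errorf("%s | %w", "<FieldName>", err) instead of a bare err. Below is a minimal sketch of what that buys a caller, assuming only what the diff shows (the decoder's field names, and that "config" expects an object per its doc comment); the sample JSON is illustrative and not taken from the library's tests.

package main

import (
	"encoding/json"
	"errors"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/slm/putlifecycle"
)

func main() {
	// "config" should be an object; a string makes the generated field decoder
	// fail on purpose so the wrapped error can be inspected.
	raw := []byte(`{"config": "not-an-object"}`)

	var req putlifecycle.Request
	if err := json.Unmarshal(raw, &req); err != nil {
		// With the new wrapping the message is prefixed with the Go field name,
		// e.g. "Config | ...", while the underlying error stays reachable via
		// errors.As / errors.Is because %w is used.
		fmt.Println("decode failed:", err)

		var typeErr *json.UnmarshalTypeError
		if errors.As(err, &typeErr) {
			fmt.Println("offending JSON value:", typeErr.Value)
		}
		return
	}
	fmt.Printf("decoded request: %+v\n", req)
}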
package start diff --git a/typedapi/slm/stop/response.go b/typedapi/slm/stop/response.go index a68f098788..29f26ae065 100644 --- a/typedapi/slm/stop/response.go +++ b/typedapi/slm/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/stop/StopSnapshotLifecycleManagementResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/stop/StopSnapshotLifecycleManagementResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/stop/stop.go b/typedapi/slm/stop/stop.go index 144dafba95..d2db514887 100644 --- a/typedapi/slm/stop/stop.go +++ b/typedapi/slm/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Turns off snapshot lifecycle management (SLM). package stop diff --git a/typedapi/snapshot/cleanuprepository/cleanup_repository.go b/typedapi/snapshot/cleanuprepository/cleanup_repository.go index 574e3e2142..c32d554a4d 100644 --- a/typedapi/snapshot/cleanuprepository/cleanup_repository.go +++ b/typedapi/snapshot/cleanuprepository/cleanup_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes stale data from repository. package cleanuprepository diff --git a/typedapi/snapshot/cleanuprepository/response.go b/typedapi/snapshot/cleanuprepository/response.go index 4f0a54554e..31b3b719d5 100644 --- a/typedapi/snapshot/cleanuprepository/response.go +++ b/typedapi/snapshot/cleanuprepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package cleanuprepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package cleanuprepository // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L22-L27 type Response struct { // Results Statistics for cleanup operations. 
diff --git a/typedapi/snapshot/clone/clone.go b/typedapi/snapshot/clone/clone.go index 902ac6ad1f..fa8d982e97 100644 --- a/typedapi/snapshot/clone/clone.go +++ b/typedapi/snapshot/clone/clone.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clones indices from one snapshot into another snapshot in the same // repository. diff --git a/typedapi/snapshot/clone/request.go b/typedapi/snapshot/clone/request.go index 1fb398d88d..816252edb0 100644 --- a/typedapi/snapshot/clone/request.go +++ b/typedapi/snapshot/clone/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clone @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/clone/SnapshotCloneRequest.ts#L24-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/clone/SnapshotCloneRequest.ts#L24-L42 type Request struct { Indices string `json:"indices"` } diff --git a/typedapi/snapshot/clone/response.go b/typedapi/snapshot/clone/response.go index 129f9165a7..4bc573cbc2 100644 --- a/typedapi/snapshot/clone/response.go +++ b/typedapi/snapshot/clone/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clone // Response holds the response body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/clone/SnapshotCloneResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/clone/SnapshotCloneResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/create/create.go b/typedapi/snapshot/create/create.go index bc917a7b57..a0120b8dc5 100644 --- a/typedapi/snapshot/create/create.go +++ b/typedapi/snapshot/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a snapshot in a repository. package create diff --git a/typedapi/snapshot/create/request.go b/typedapi/snapshot/create/request.go index e4e43f1b6a..bbd01834ea 100644 --- a/typedapi/snapshot/create/request.go +++ b/typedapi/snapshot/create/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package create @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/create/SnapshotCreateRequest.ts#L24-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/create/SnapshotCreateRequest.ts#L24-L81 type Request struct { // FeatureStates Feature states to include in the snapshot. Each feature state includes one or @@ -100,7 +100,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "feature_states": if err := dec.Decode(&s.FeatureStates); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureStates", err) } case "ignore_unavailable": @@ -110,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnavailable", err) } s.IgnoreUnavailable = &value case bool: @@ -124,7 +124,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeGlobalState", err) } s.IncludeGlobalState = &value case bool: @@ -137,19 +137,19 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "partial": @@ -159,7 +159,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Partial", err) } s.Partial = &value case bool: diff --git a/typedapi/snapshot/create/response.go b/typedapi/snapshot/create/response.go index 030c571cc3..1c5c881f70 100644 --- a/typedapi/snapshot/create/response.go +++ b/typedapi/snapshot/create/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package create @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/create/SnapshotCreateResponse.ts#L22-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/create/SnapshotCreateResponse.ts#L22-L35 type Response struct { // Accepted Equals `true` if the snapshot was accepted. 
Present when the request had diff --git a/typedapi/snapshot/createrepository/create_repository.go b/typedapi/snapshot/createrepository/create_repository.go index 60f0e2fd3a..3fd8544ce9 100644 --- a/typedapi/snapshot/createrepository/create_repository.go +++ b/typedapi/snapshot/createrepository/create_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a repository. package createrepository @@ -91,8 +91,6 @@ func New(tp elastictransport.Interface) *CreateRepository { headers: make(http.Header), buf: gobytes.NewBuffer(nil), - - req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -336,19 +334,3 @@ func (r *CreateRepository) Verify(verify bool) *CreateRepository { return r } - -// API name: settings -func (r *CreateRepository) Settings(settings *types.RepositorySettings) *CreateRepository { - - r.req.Settings = *settings - - return r -} - -// API name: type -func (r *CreateRepository) Type(type_ string) *CreateRepository { - - r.req.Type = type_ - - return r -} diff --git a/typedapi/snapshot/createrepository/request.go b/typedapi/snapshot/createrepository/request.go index 2a3f1a9bb1..9cd853b440 100644 --- a/typedapi/snapshot/createrepository/request.go +++ b/typedapi/snapshot/createrepository/request.go @@ -16,40 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package createrepository import ( - "encoding/json" - "fmt" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Request holds the request body struct for the package createrepository // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts#L28-L48 -type Request struct { - Repository *types.Repository `json:"repository,omitempty"` - Settings types.RepositorySettings `json:"settings"` - Type string `json:"type"` -} - -// NewRequest returns a Request -func NewRequest() *Request { - r := &Request{} - return r -} - -// FromJSON allows to load an arbitrary json into the request structure -func (r *Request) FromJSON(data string) (*Request, error) { - var req Request - err := json.Unmarshal([]byte(data), &req) - - if err != nil { - return nil, fmt.Errorf("could not deserialise json into Createrepository request: %w", err) - } - - return &req, nil -} +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts#L25-L42 +type Request = types.Repository diff --git a/typedapi/snapshot/createrepository/response.go b/typedapi/snapshot/createrepository/response.go index 0a4e3b7ad4..6dba74abb9 100644 --- a/typedapi/snapshot/createrepository/response.go +++ b/typedapi/snapshot/createrepository/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
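Editorial note: the createrepository hunks above remove the wrapper Request struct together with its generated Settings(...) and Type(...) builders, and alias Request directly to types.Repository, so the request body becomes the repository definition itself. A rough sketch of the body shape this implies follows; the "fs" type and "location" setting are standard snapshot-repository fields used as placeholders, and how the payload is handed to the endpoint (for example through a raw-body option) is an assumption, not something shown in this diff.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// With Request aliased to types.Repository, the JSON body for
	// snapshot.create_repository is the repository definition at the top level.
	body := map[string]any{
		"type": "fs",
		"settings": map[string]any{
			"location": "/mount/backups/my_backup",
		},
	}
	payload, err := json.Marshal(body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(payload))
	// The payload would then be supplied to the create-repository endpoint,
	// replacing the removed Settings(...) / Type(...) builder calls; the exact
	// endpoint option used for that is not part of this diff.
}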
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package createrepository // Response holds the response body struct for the package createrepository // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/create_repository/SnapshotCreateRepositoryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/create_repository/SnapshotCreateRepositoryResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/delete/delete.go b/typedapi/snapshot/delete/delete.go index 73c030b838..45b2135dd1 100644 --- a/typedapi/snapshot/delete/delete.go +++ b/typedapi/snapshot/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes one or more snapshots. package delete diff --git a/typedapi/snapshot/delete/response.go b/typedapi/snapshot/delete/response.go index d9ffb59ec7..f6c0644d42 100644 --- a/typedapi/snapshot/delete/response.go +++ b/typedapi/snapshot/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/delete/SnapshotDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/delete/SnapshotDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/deleterepository/delete_repository.go b/typedapi/snapshot/deleterepository/delete_repository.go index 0e6b5ca59a..d6bc65811b 100644 --- a/typedapi/snapshot/deleterepository/delete_repository.go +++ b/typedapi/snapshot/deleterepository/delete_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a repository. package deleterepository diff --git a/typedapi/snapshot/deleterepository/response.go b/typedapi/snapshot/deleterepository/response.go index 130f570bd9..4f308ebff5 100644 --- a/typedapi/snapshot/deleterepository/response.go +++ b/typedapi/snapshot/deleterepository/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleterepository // Response holds the response body struct for the package deleterepository // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/delete_repository/SnapshotDeleteRepositoryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/delete_repository/SnapshotDeleteRepositoryResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/get/get.go b/typedapi/snapshot/get/get.go index 319b7c71ef..ffb0abb365 100644 --- a/typedapi/snapshot/get/get.go +++ b/typedapi/snapshot/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about a snapshot. package get diff --git a/typedapi/snapshot/get/response.go b/typedapi/snapshot/get/response.go index fa3130ef87..75adefec13 100644 --- a/typedapi/snapshot/get/response.go +++ b/typedapi/snapshot/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/get/SnapshotGetResponse.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/get/SnapshotGetResponse.ts#L25-L42 type Response struct { // Remaining The number of remaining snapshots that were not returned due to size limits diff --git a/typedapi/snapshot/getrepository/get_repository.go b/typedapi/snapshot/getrepository/get_repository.go index d02367caa4..172a824e31 100644 --- a/typedapi/snapshot/getrepository/get_repository.go +++ b/typedapi/snapshot/getrepository/get_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about a repository. package getrepository diff --git a/typedapi/snapshot/getrepository/response.go b/typedapi/snapshot/getrepository/response.go index c38f994fb2..04cb4998e5 100644 --- a/typedapi/snapshot/getrepository/response.go +++ b/typedapi/snapshot/getrepository/response.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getrepository import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Response holds the response body struct for the package getrepository // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/get_repository/SnapshotGetRepositoryResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/get_repository/SnapshotGetRepositoryResponse.ts#L23-L25 type Response map[string]types.Repository @@ -35,3 +41,98 @@ func NewResponse() Response { r := make(Response, 0) return r } + +func (r Response) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + o := make(map[string]interface{}, 0) + dec.Decode(&o) + dec = json.NewDecoder(bytes.NewReader(data)) + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + key := fmt.Sprintf("%s", t) + if target, ok := o[key]; ok { + if t, ok := target.(map[string]interface{})["type"]; ok { + + switch t { + + case "azure": + oo := types.NewAzureRepository() + err := dec.Decode(&oo) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + r[key] = oo + + case "gcs": + oo := types.NewGcsRepository() + err := dec.Decode(&oo) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + r[key] = oo + + case "s3": + oo := types.NewS3Repository() + err := dec.Decode(&oo) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + r[key] = oo + + case "fs": + oo := types.NewSharedFileSystemRepository() + err := dec.Decode(&oo) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + r[key] = oo + + case "url": + oo := types.NewReadOnlyUrlRepository() + err := dec.Decode(&oo) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + r[key] = oo + + case "source": + oo := types.NewSourceOnlyRepository() + err := dec.Decode(&oo) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + r[key] = oo + + } + } + } + + } + return nil +} diff --git a/typedapi/snapshot/restore/request.go b/typedapi/snapshot/restore/request.go index 10e96cdcb6..aa5b357bc5 100644 --- a/typedapi/snapshot/restore/request.go +++ b/typedapi/snapshot/restore/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
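Editorial note: the new UnmarshalJSON on the get-repository Response above inspects each entry's "type" discriminator and decodes it into the matching concrete repository type (azure, gcs, s3, fs, url, source). The sketch below shows how a caller might branch on the result; it assumes the types.New*Repository constructors return pointers, which the diff itself does not state.

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/snapshot/getrepository"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// describeRepositories branches on the concrete value stored for each entry
// by the generated UnmarshalJSON above.
func describeRepositories(resp getrepository.Response) {
	for name, repo := range resp {
		switch r := repo.(type) {
		case *types.S3Repository:
			fmt.Printf("%s: s3 repository: %+v\n", name, r)
		case *types.SharedFileSystemRepository:
			fmt.Printf("%s: shared filesystem repository: %+v\n", name, r)
		case *types.AzureRepository, *types.GcsRepository,
			*types.ReadOnlyUrlRepository, *types.SourceOnlyRepository:
			fmt.Printf("%s: repository of kind %T\n", name, r)
		default:
			// Anything the decoder above did not map to a known concrete type.
			fmt.Printf("%s: unhandled repository payload %T\n", name, r)
		}
	}
}

func main() {
	describeRepositories(getrepository.Response{})
}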
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package restore @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package restore // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/restore/SnapshotRestoreRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/restore/SnapshotRestoreRequest.ts#L25-L51 type Request struct { FeatureStates []string `json:"feature_states,omitempty"` IgnoreIndexSettings []string `json:"ignore_index_settings,omitempty"` @@ -81,12 +81,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "feature_states": if err := dec.Decode(&s.FeatureStates); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureStates", err) } case "ignore_index_settings": if err := dec.Decode(&s.IgnoreIndexSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreIndexSettings", err) } case "ignore_unavailable": @@ -96,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnavailable", err) } s.IgnoreUnavailable = &value case bool: @@ -110,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeAliases", err) } s.IncludeAliases = &value case bool: @@ -124,7 +124,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeGlobalState", err) } s.IncludeGlobalState = &value case bool: @@ -133,7 +133,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "index_settings": if err := dec.Decode(&s.IndexSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexSettings", err) } case "indices": @@ -142,13 +142,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } @@ -159,7 +159,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Partial", err) } s.Partial = &value case bool: @@ -169,7 +169,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "rename_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RenamePattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -181,7 +181,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "rename_replacement": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RenameReplacement", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/snapshot/restore/response.go b/typedapi/snapshot/restore/response.go index 
8dfe4dbfb4..506ef62e1e 100644 --- a/typedapi/snapshot/restore/response.go +++ b/typedapi/snapshot/restore/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package restore @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package restore // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/restore/SnapshotRestoreResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/restore/SnapshotRestoreResponse.ts#L23-L25 type Response struct { Snapshot types.SnapshotRestore `json:"snapshot"` } diff --git a/typedapi/snapshot/restore/restore.go b/typedapi/snapshot/restore/restore.go index f40f8f9e72..5c05a313a5 100644 --- a/typedapi/snapshot/restore/restore.go +++ b/typedapi/snapshot/restore/restore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Restores a snapshot. package restore diff --git a/typedapi/snapshot/status/response.go b/typedapi/snapshot/status/response.go index 39a2444af7..f274257354 100644 --- a/typedapi/snapshot/status/response.go +++ b/typedapi/snapshot/status/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package status @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package status // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/status/SnapshotStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/status/SnapshotStatusResponse.ts#L22-L24 type Response struct { Snapshots []types.Status `json:"snapshots"` } diff --git a/typedapi/snapshot/status/status.go b/typedapi/snapshot/status/status.go index f5ed7cc307..b693ac56b5 100644 --- a/typedapi/snapshot/status/status.go +++ b/typedapi/snapshot/status/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about the status of a snapshot. package status diff --git a/typedapi/snapshot/verifyrepository/response.go b/typedapi/snapshot/verifyrepository/response.go index 10fa80a85b..4423ac2aa6 100644 --- a/typedapi/snapshot/verifyrepository/response.go +++ b/typedapi/snapshot/verifyrepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package verifyrepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package verifyrepository // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L23-L25 type Response struct { Nodes map[string]types.CompactNodeInfo `json:"nodes"` } diff --git a/typedapi/snapshot/verifyrepository/verify_repository.go b/typedapi/snapshot/verifyrepository/verify_repository.go index dbf20e2199..6d5dcc0cd8 100644 --- a/typedapi/snapshot/verifyrepository/verify_repository.go +++ b/typedapi/snapshot/verifyrepository/verify_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Verifies a repository. package verifyrepository diff --git a/typedapi/sql/clearcursor/clear_cursor.go b/typedapi/sql/clearcursor/clear_cursor.go index 2a92496a3c..1d0d6868c6 100644 --- a/typedapi/sql/clearcursor/clear_cursor.go +++ b/typedapi/sql/clearcursor/clear_cursor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Clears the SQL cursor package clearcursor diff --git a/typedapi/sql/clearcursor/request.go b/typedapi/sql/clearcursor/request.go index 3e25ba7590..690c00ddce 100644 --- a/typedapi/sql/clearcursor/request.go +++ b/typedapi/sql/clearcursor/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcursor @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clearcursor // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/clear_cursor/ClearSqlCursorRequest.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/clear_cursor/ClearSqlCursorRequest.ts#L22-L34 type Request struct { // Cursor Cursor to clear. diff --git a/typedapi/sql/clearcursor/response.go b/typedapi/sql/clearcursor/response.go index b36a7be315..37bf3171ae 100644 --- a/typedapi/sql/clearcursor/response.go +++ b/typedapi/sql/clearcursor/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package clearcursor // Response holds the response body struct for the package clearcursor // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/clear_cursor/ClearSqlCursorResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/clear_cursor/ClearSqlCursorResponse.ts#L20-L22 type Response struct { Succeeded bool `json:"succeeded"` } diff --git a/typedapi/sql/deleteasync/delete_async.go b/typedapi/sql/deleteasync/delete_async.go index a95d6ead43..11da4ac9f8 100644 --- a/typedapi/sql/deleteasync/delete_async.go +++ b/typedapi/sql/deleteasync/delete_async.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an async SQL search or a stored synchronous SQL search. If the search // is still running, the API cancels it. diff --git a/typedapi/sql/deleteasync/response.go b/typedapi/sql/deleteasync/response.go index 3fea20bad0..fc8163134b 100644 --- a/typedapi/sql/deleteasync/response.go +++ b/typedapi/sql/deleteasync/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deleteasync // Response holds the response body struct for the package deleteasync // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/delete_async/SqlDeleteAsyncResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/delete_async/SqlDeleteAsyncResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/sql/getasync/get_async.go b/typedapi/sql/getasync/get_async.go index c8a09dd1ef..b421ef1688 100644 --- a/typedapi/sql/getasync/get_async.go +++ b/typedapi/sql/getasync/get_async.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the current status and available results for an async SQL search or // stored synchronous SQL search diff --git a/typedapi/sql/getasync/response.go b/typedapi/sql/getasync/response.go index 8c9e9a050d..db848540db 100644 --- a/typedapi/sql/getasync/response.go +++ b/typedapi/sql/getasync/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getasync @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package getasync // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/get_async/SqlGetAsyncResponse.ts#L23-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/get_async/SqlGetAsyncResponse.ts#L23-L60 type Response struct { // Columns Column headings for the search results. Each object is a column. diff --git a/typedapi/sql/getasyncstatus/get_async_status.go b/typedapi/sql/getasyncstatus/get_async_status.go index 918b3ef4f3..3700e621f6 100644 --- a/typedapi/sql/getasyncstatus/get_async_status.go +++ b/typedapi/sql/getasyncstatus/get_async_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns the current status of an async SQL search or a stored synchronous SQL // search diff --git a/typedapi/sql/getasyncstatus/response.go b/typedapi/sql/getasyncstatus/response.go index cbce9ab26b..fe3444dacd 100644 --- a/typedapi/sql/getasyncstatus/response.go +++ b/typedapi/sql/getasyncstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getasyncstatus // Response holds the response body struct for the package getasyncstatus // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/get_async_status/SqlGetAsyncStatusResponse.ts#L23-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/get_async_status/SqlGetAsyncStatusResponse.ts#L23-L55 type Response struct { // CompletionStatus HTTP status code for the search. The API only returns this property for diff --git a/typedapi/sql/query/query.go b/typedapi/sql/query/query.go index 9f93b3f6bf..cc59b5ca19 100644 --- a/typedapi/sql/query/query.go +++ b/typedapi/sql/query/query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Executes a SQL request package query diff --git a/typedapi/sql/query/request.go b/typedapi/sql/query/request.go index d140f370fb..76ca3c19c1 100644 --- a/typedapi/sql/query/request.go +++ b/typedapi/sql/query/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package query @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/query/QuerySqlRequest.ts#L28-L122 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/query/QuerySqlRequest.ts#L28-L122 type Request struct { // Catalog Default catalog (cluster) for queries. If unspecified, the queries execute on @@ -119,7 +119,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "catalog": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Catalog", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,7 +135,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Columnar", err) } s.Columnar = &value case bool: @@ -145,7 +145,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "cursor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Cursor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -162,7 +162,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FetchSize", err) } s.FetchSize = &value case float64: @@ -177,7 +177,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FieldMultiValueLeniency", err) } s.FieldMultiValueLeniency = &value case bool: @@ -186,7 +186,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "index_using_frozen": @@ -196,7 +196,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexUsingFrozen", err) } s.IndexUsingFrozen = &value case bool: @@ -205,7 +205,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "keep_alive": if err := dec.Decode(&s.KeepAlive); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepAlive", err) } case "keep_on_completion": @@ -215,7 +215,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "KeepOnCompletion", err) } s.KeepOnCompletion = &value case bool: @@ -224,7 +224,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "page_timeout": if err := dec.Decode(&s.PageTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "PageTimeout", err) } case "params": @@ -232,13 +232,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) 
o, err = strconv.Unquote(o) @@ -249,22 +249,22 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "request_timeout": if err := dec.Decode(&s.RequestTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestTimeout", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } case "wait_for_completion_timeout": if err := dec.Decode(&s.WaitForCompletionTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "WaitForCompletionTimeout", err) } } diff --git a/typedapi/sql/query/response.go b/typedapi/sql/query/response.go index 7d32c7cef5..7b84382687 100644 --- a/typedapi/sql/query/response.go +++ b/typedapi/sql/query/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package query @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/query/QuerySqlResponse.ts#L23-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/query/QuerySqlResponse.ts#L23-L60 type Response struct { // Columns Column headings for the search results. Each object is a column. diff --git a/typedapi/sql/translate/request.go b/typedapi/sql/translate/request.go index 90e261c271..cc8a0e0877 100644 --- a/typedapi/sql/translate/request.go +++ b/typedapi/sql/translate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package translate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package translate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/translate/TranslateSqlRequest.ts#L25-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/translate/TranslateSqlRequest.ts#L25-L54 type Request struct { // FetchSize The maximum number of rows (or entries) to return in one response. 
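The hunks above change every field decode failure in the generated UnmarshalJSON methods from a bare `return err` to `return fmt.Errorf("%s | %w", "<FieldName>", err)`, so the error now names the struct field while still wrapping the original error. A minimal sketch of what that means for callers, assuming the decoding pattern shown in the sql query request hunk (the sample JSON value and variable names are illustrative only, not part of the generated code):

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/sql/query"
)

func main() {
	// "columnar" must be a boolean; the string "maybe" cannot be parsed.
	raw := []byte(`{"query":"SELECT 1","columnar":"maybe"}`)

	var req query.Request
	err := json.Unmarshal(raw, &req)
	// Previously this surfaced as a bare strconv message; with this change it
	// reads roughly: Columnar | strconv.ParseBool: parsing "maybe": invalid syntax
	fmt.Println(err)

	// Because the field prefix wraps the original error with %w, callers can
	// still reach the underlying *strconv.NumError via errors.As.
	var numErr *strconv.NumError
	if errors.As(err, &numErr) {
		fmt.Println("failed to parse value:", numErr.Num)
	}
}

The same pattern applies to every typedapi request and response touched in this diff; only the field name passed to fmt.Errorf differs.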
@@ -86,7 +86,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FetchSize", err) } s.FetchSize = &value case float64: @@ -96,13 +96,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +113,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/sql/translate/response.go b/typedapi/sql/translate/response.go index ee134fe9c5..ba324fd2d6 100644 --- a/typedapi/sql/translate/response.go +++ b/typedapi/sql/translate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package translate @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package translate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/translate/TranslateSqlResponse.ts#L28-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/translate/TranslateSqlResponse.ts#L28-L38 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Fields []types.FieldAndFormat `json:"fields,omitempty"` @@ -69,17 +70,17 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]types.Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "size": @@ -89,7 +90,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -103,19 +104,19 @@ func (s *Response) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } } diff --git a/typedapi/sql/translate/translate.go b/typedapi/sql/translate/translate.go index 754dd9dc17..431c696f1d 100644 --- a/typedapi/sql/translate/translate.go +++ 
b/typedapi/sql/translate/translate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Translates SQL into Elasticsearch queries package translate diff --git a/typedapi/ssl/certificates/certificates.go b/typedapi/ssl/certificates/certificates.go index 093a072361..5e43de69ec 100644 --- a/typedapi/ssl/certificates/certificates.go +++ b/typedapi/ssl/certificates/certificates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the X.509 certificates used to encrypt // communications in the cluster. diff --git a/typedapi/ssl/certificates/response.go b/typedapi/ssl/certificates/response.go index f5e9aaadd6..56b1b83a1f 100644 --- a/typedapi/ssl/certificates/response.go +++ b/typedapi/ssl/certificates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package certificates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package certificates // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ssl/certificates/GetCertificatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ssl/certificates/GetCertificatesResponse.ts#L22-L24 type Response []types.CertificateInformation diff --git a/typedapi/synonyms/deletesynonym/delete_synonym.go b/typedapi/synonyms/deletesynonym/delete_synonym.go index 4c8be31ee6..fc6ac912b1 100644 --- a/typedapi/synonyms/deletesynonym/delete_synonym.go +++ b/typedapi/synonyms/deletesynonym/delete_synonym.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a synonym set package deletesynonym diff --git a/typedapi/synonyms/deletesynonym/response.go b/typedapi/synonyms/deletesynonym/response.go index f65ecfdcd0..25ebd0b0d9 100644 --- a/typedapi/synonyms/deletesynonym/response.go +++ b/typedapi/synonyms/deletesynonym/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletesynonym // Response holds the response body struct for the package deletesynonym // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/delete_synonym/SynonymsDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/delete_synonym/SynonymsDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go b/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go index d7968e2e0d..6ca8cec411 100644 --- a/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go +++ b/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes a synonym rule in a synonym set package deletesynonymrule diff --git a/typedapi/synonyms/deletesynonymrule/response.go b/typedapi/synonyms/deletesynonymrule/response.go index c524de2373..5faac91353 100644 --- a/typedapi/synonyms/deletesynonymrule/response.go +++ b/typedapi/synonyms/deletesynonymrule/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletesynonymrule @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package deletesynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/delete_synonym_rule/SynonymRuleDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/delete_synonym_rule/SynonymRuleDeleteResponse.ts#L22-L24 type Response struct { // ReloadAnalyzersDetails Updating synonyms in a synonym set reloads the associated analyzers. diff --git a/typedapi/synonyms/getsynonym/get_synonym.go b/typedapi/synonyms/getsynonym/get_synonym.go index 59957ad150..b3073dfdaa 100644 --- a/typedapi/synonyms/getsynonym/get_synonym.go +++ b/typedapi/synonyms/getsynonym/get_synonym.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves a synonym set package getsynonym diff --git a/typedapi/synonyms/getsynonym/response.go b/typedapi/synonyms/getsynonym/response.go index 9ab5052dfa..11d76dc16f 100644 --- a/typedapi/synonyms/getsynonym/response.go +++ b/typedapi/synonyms/getsynonym/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getsynonym @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsynonym // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/get_synonym/SynonymsGetResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/get_synonym/SynonymsGetResponse.ts#L23-L28 type Response struct { Count int `json:"count"` SynonymsSet []types.SynonymRuleRead `json:"synonyms_set"` diff --git a/typedapi/synonyms/getsynonymrule/get_synonym_rule.go b/typedapi/synonyms/getsynonymrule/get_synonym_rule.go index 547de06b2d..90c330a40f 100644 --- a/typedapi/synonyms/getsynonymrule/get_synonym_rule.go +++ b/typedapi/synonyms/getsynonymrule/get_synonym_rule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves a synonym rule from a synonym set package getsynonymrule diff --git a/typedapi/synonyms/getsynonymrule/response.go b/typedapi/synonyms/getsynonymrule/response.go index 2e2bc16827..dc1ebc5326 100644 --- a/typedapi/synonyms/getsynonymrule/response.go +++ b/typedapi/synonyms/getsynonymrule/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getsynonymrule // Response holds the response body struct for the package getsynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/get_synonym_rule/SynonymRuleGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/get_synonym_rule/SynonymRuleGetResponse.ts#L22-L24 type Response struct { // Id Synonym Rule identifier diff --git a/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go b/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go index e2855c7a57..1fcd7ef288 100644 --- a/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go +++ b/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves a summary of all defined synonym sets package getsynonymssets diff --git a/typedapi/synonyms/getsynonymssets/response.go b/typedapi/synonyms/getsynonymssets/response.go index e372f762b2..57ad16e567 100644 --- a/typedapi/synonyms/getsynonymssets/response.go +++ b/typedapi/synonyms/getsynonymssets/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getsynonymssets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsynonymssets // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L23-L28 type Response struct { Count int `json:"count"` Results []types.SynonymsSetItem `json:"results"` diff --git a/typedapi/synonyms/putsynonym/put_synonym.go b/typedapi/synonyms/putsynonym/put_synonym.go index 98c38e403d..6a01e680c2 100644 --- a/typedapi/synonyms/putsynonym/put_synonym.go +++ b/typedapi/synonyms/putsynonym/put_synonym.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a synonyms set package putsynonym diff --git a/typedapi/synonyms/putsynonym/request.go b/typedapi/synonyms/putsynonym/request.go index baa0592c45..25dc445d54 100644 --- a/typedapi/synonyms/putsynonym/request.go +++ b/typedapi/synonyms/putsynonym/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsynonym @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putsynonym // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/put_synonym/SynonymsPutRequest.ts#L23-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/put_synonym/SynonymsPutRequest.ts#L23-L42 type Request struct { // SynonymsSet The synonym set information to update diff --git a/typedapi/synonyms/putsynonym/response.go b/typedapi/synonyms/putsynonym/response.go index 50386d9d17..85f49cc73e 100644 --- a/typedapi/synonyms/putsynonym/response.go +++ b/typedapi/synonyms/putsynonym/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsynonym @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package putsynonym // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/put_synonym/SynonymsPutResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/put_synonym/SynonymsPutResponse.ts#L24-L29 type Response struct { ReloadAnalyzersDetails types.ReloadDetails `json:"reload_analyzers_details"` Result result.Result `json:"result"` diff --git a/typedapi/synonyms/putsynonymrule/put_synonym_rule.go b/typedapi/synonyms/putsynonymrule/put_synonym_rule.go index 90e98708f4..10e5f579fe 100644 --- a/typedapi/synonyms/putsynonymrule/put_synonym_rule.go +++ b/typedapi/synonyms/putsynonymrule/put_synonym_rule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates or updates a synonym rule in a synonym set package putsynonymrule diff --git a/typedapi/synonyms/putsynonymrule/request.go b/typedapi/synonyms/putsynonymrule/request.go index 5004f5c57b..4f0d4b85d8 100644 --- a/typedapi/synonyms/putsynonymrule/request.go +++ b/typedapi/synonyms/putsynonymrule/request.go @@ -16,21 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsynonymrule import ( - "bytes" "encoding/json" - "errors" "fmt" - "io" ) // Request holds the request body struct for the package putsynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/put_synonym_rule/SynonymRulePutRequest.ts#L23-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/put_synonym_rule/SynonymRulePutRequest.ts#L23-L47 type Request struct { Synonyms string `json:"synonyms"` } @@ -52,27 +49,3 @@ func (r *Request) FromJSON(data string) (*Request, error) { return &req, nil } - -func (s *Request) UnmarshalJSON(data []byte) error { - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "synonyms": - if err := dec.Decode(&s.Synonyms); err != nil { - return err - } - - } - } - return nil -} diff --git a/typedapi/synonyms/putsynonymrule/response.go b/typedapi/synonyms/putsynonymrule/response.go index 63d0b57d8f..0e107fddcb 100644 --- a/typedapi/synonyms/putsynonymrule/response.go +++ b/typedapi/synonyms/putsynonymrule/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
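With the hand-written UnmarshalJSON removed above, the putsynonymrule request falls back to encoding/json's default struct decoding for its single `synonyms` field, and the generated FromJSON helper shown in the hunk is retained. A small sketch under that assumption (the synonym strings are made up):

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/synonyms/putsynonymrule"
)

func main() {
	// Default struct decoding now covers the single "synonyms" field.
	var req putsynonymrule.Request
	if err := json.Unmarshal([]byte(`{"synonyms":"hello, hi => greeting"}`), &req); err != nil {
		log.Fatal(err)
	}
	fmt.Println(req.Synonyms) // hello, hi => greeting

	// The generated FromJSON helper behaves the same way.
	req2, err := new(putsynonymrule.Request).FromJSON(`{"synonyms":"bye, goodbye"}`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(req2.Synonyms) // bye, goodbye
}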
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putsynonymrule @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package putsynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/put_synonym_rule/SynonymRulePutResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/put_synonym_rule/SynonymRulePutResponse.ts#L22-L24 type Response struct { // ReloadAnalyzersDetails Updating synonyms in a synonym set reloads the associated analyzers. diff --git a/typedapi/tasks/cancel/cancel.go b/typedapi/tasks/cancel/cancel.go index 353f03fb83..c9cb36684c 100644 --- a/typedapi/tasks/cancel/cancel.go +++ b/typedapi/tasks/cancel/cancel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Cancels a task, if it can be cancelled through an API. package cancel diff --git a/typedapi/tasks/cancel/response.go b/typedapi/tasks/cancel/response.go index 40c1bd19cd..30d7d55eb5 100644 --- a/typedapi/tasks/cancel/response.go +++ b/typedapi/tasks/cancel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package cancel @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package cancel // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/cancel/CancelTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/cancel/CancelTasksResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` // Nodes Task information grouped by node, if `group_by` was set to `node` (the @@ -68,7 +69,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "node_failures": if err := dec.Decode(&s.NodeFailures); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeFailures", err) } case "nodes": @@ -76,12 +77,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Nodes = make(map[string]types.NodeTasks, 0) } if err := dec.Decode(&s.Nodes); err != nil { - return err + return fmt.Errorf("%s | %w", "Nodes", err) } case "task_failures": if err := dec.Decode(&s.TaskFailures); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskFailures", err) } case "tasks": @@ -94,13 +95,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]types.ParentTaskInfo, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } s.Tasks = o case '[': o := []types.TaskInfo{} if err := 
localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } s.Tasks = o } diff --git a/typedapi/tasks/get/get.go b/typedapi/tasks/get/get.go index 836c31697e..3964d8c92f 100644 --- a/typedapi/tasks/get/get.go +++ b/typedapi/tasks/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns information about a task. package get diff --git a/typedapi/tasks/get/response.go b/typedapi/tasks/get/response.go index 0fbecbc8f8..a15a42cdd0 100644 --- a/typedapi/tasks/get/response.go +++ b/typedapi/tasks/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package get @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/get/GetTaskResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/get/GetTaskResponse.ts#L24-L31 type Response struct { Completed bool `json:"completed"` Error *types.ErrorCause `json:"error,omitempty"` diff --git a/typedapi/tasks/list/list.go b/typedapi/tasks/list/list.go index dcf67ca5d6..3455b2fdcb 100644 --- a/typedapi/tasks/list/list.go +++ b/typedapi/tasks/list/list.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Returns a list of tasks. package list diff --git a/typedapi/tasks/list/response.go b/typedapi/tasks/list/response.go index 15e739f55d..c47fe9d959 100644 --- a/typedapi/tasks/list/response.go +++ b/typedapi/tasks/list/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package list @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -31,7 +32,7 @@ import ( // Response holds the response body struct for the package list // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/list/ListTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/list/ListTasksResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` // Nodes Task information grouped by node, if `group_by` was set to `node` (the @@ -68,7 +69,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "node_failures": if err := dec.Decode(&s.NodeFailures); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeFailures", err) } case "nodes": @@ -76,12 +77,12 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Nodes = make(map[string]types.NodeTasks, 0) } if err := dec.Decode(&s.Nodes); err != nil { - return err + return fmt.Errorf("%s | %w", "Nodes", err) } case "task_failures": if err := dec.Decode(&s.TaskFailures); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskFailures", err) } case "tasks": @@ -94,13 +95,13 @@ func (s *Response) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]types.ParentTaskInfo, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } s.Tasks = o case '[': o := []types.TaskInfo{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } s.Tasks = o } diff --git a/typedapi/textstructure/findstructure/find_structure.go b/typedapi/textstructure/findstructure/find_structure.go index 58b0ca16d0..4b482caf9d 100644 --- a/typedapi/textstructure/findstructure/find_structure.go +++ b/typedapi/textstructure/findstructure/find_structure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Finds the structure of a text file. The text file must contain data that is // suitable to be ingested into Elasticsearch. diff --git a/typedapi/textstructure/findstructure/request.go b/typedapi/textstructure/findstructure/request.go index 838df4216b..278c56fac9 100644 --- a/typedapi/textstructure/findstructure/request.go +++ b/typedapi/textstructure/findstructure/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
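The tasks cancel and list response decoders above preserve both server shapes of the top-level `tasks` field: a JSON object (as returned with `group_by=parents`) is decoded into map[string]types.ParentTaskInfo, while a JSON array (as returned with `group_by=none`) becomes []types.TaskInfo. A short sketch of that behaviour, assuming the union handling shown in the hunks (the empty payloads are illustrative only):

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/tasks/list"
)

func main() {
	var byParent, flat list.Response

	// ?group_by=parents: "tasks" is a JSON object keyed by parent task id.
	if err := json.Unmarshal([]byte(`{"tasks":{}}`), &byParent); err != nil {
		log.Fatal(err)
	}
	// ?group_by=none: "tasks" is a flat JSON array.
	if err := json.Unmarshal([]byte(`{"tasks":[]}`), &flat); err != nil {
		log.Fatal(err)
	}

	fmt.Printf("%T\n", byParent.Tasks) // map[string]types.ParentTaskInfo
	fmt.Printf("%T\n", flat.Tasks)     // []types.TaskInfo
}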
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package findstructure @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package findstructure // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/find_structure/FindStructureRequest.ts#L24-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/find_structure/FindStructureRequest.ts#L24-L75 type Request = []json.RawMessage diff --git a/typedapi/textstructure/findstructure/response.go b/typedapi/textstructure/findstructure/response.go index 302a949828..fc3170f173 100644 --- a/typedapi/textstructure/findstructure/response.go +++ b/typedapi/textstructure/findstructure/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package findstructure @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package findstructure // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/find_structure/FindStructureResponse.ts#L27-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/find_structure/FindStructureResponse.ts#L27-L52 type Response struct { Charset string `json:"charset"` ColumnNames []string `json:"column_names,omitempty"` diff --git a/typedapi/textstructure/testgrokpattern/request.go b/typedapi/textstructure/testgrokpattern/request.go index c391b405e4..90130ac608 100644 --- a/typedapi/textstructure/testgrokpattern/request.go +++ b/typedapi/textstructure/testgrokpattern/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package testgrokpattern @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package testgrokpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/test_grok_pattern/TestGrokPatternRequest.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/test_grok_pattern/TestGrokPatternRequest.ts#L22-L43 type Request struct { // GrokPattern Grok pattern to run on the text. diff --git a/typedapi/textstructure/testgrokpattern/response.go b/typedapi/textstructure/testgrokpattern/response.go index 4fd880a51d..94c1407878 100644 --- a/typedapi/textstructure/testgrokpattern/response.go +++ b/typedapi/textstructure/testgrokpattern/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
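Since the find_structure request body is generated as `type Request = []json.RawMessage`, each element of the slice is one raw JSON document to be analysed. A minimal, hypothetical construction under that assumption (the log lines are invented for illustration):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/textstructure/findstructure"
)

func main() {
	body := findstructure.Request{
		json.RawMessage(`{"message":"2024-05-01T12:00:00Z ERROR disk full"}`),
		json.RawMessage(`{"message":"2024-05-01T12:00:01Z INFO retrying"}`),
	}
	fmt.Println(len(body), "sample documents to analyse")
}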
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package testgrokpattern @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package testgrokpattern // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/test_grok_pattern/TestGrokPatternResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/test_grok_pattern/TestGrokPatternResponse.ts#L22-L26 type Response struct { Matches []types.MatchedText `json:"matches"` } diff --git a/typedapi/textstructure/testgrokpattern/test_grok_pattern.go b/typedapi/textstructure/testgrokpattern/test_grok_pattern.go index 4f7ba830d7..82fe00a660 100644 --- a/typedapi/textstructure/testgrokpattern/test_grok_pattern.go +++ b/typedapi/textstructure/testgrokpattern/test_grok_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Tests a Grok pattern on some text. package testgrokpattern @@ -74,7 +74,7 @@ func NewTestGrokPatternFunc(tp elastictransport.Interface) NewTestGrokPattern { // Tests a Grok pattern on some text. // -// https://www.elastic.co/guide/en/elasticsearch/reference/current/test-grok-pattern-api.html +// https://www.elastic.co/guide/en/elasticsearch/reference/current/test-grok-pattern.html func New(tp elastictransport.Interface) *TestGrokPattern { r := &TestGrokPattern{ transport: tp, diff --git a/typedapi/transform/deletetransform/delete_transform.go b/typedapi/transform/deletetransform/delete_transform.go index e88f958b58..ba5609a3ef 100644 --- a/typedapi/transform/deletetransform/delete_transform.go +++ b/typedapi/transform/deletetransform/delete_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deletes an existing transform. package deletetransform diff --git a/typedapi/transform/deletetransform/response.go b/typedapi/transform/deletetransform/response.go index 1d0236c2d8..9f9909a0a7 100644 --- a/typedapi/transform/deletetransform/response.go +++ b/typedapi/transform/deletetransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletetransform // Response holds the response body struct for the package deletetransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/delete_transform/DeleteTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/delete_transform/DeleteTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/gettransform/get_transform.go b/typedapi/transform/gettransform/get_transform.go index 9f8b6b45ee..800ba487ac 100644 --- a/typedapi/transform/gettransform/get_transform.go +++ b/typedapi/transform/gettransform/get_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves configuration information for transforms. package gettransform diff --git a/typedapi/transform/gettransform/response.go b/typedapi/transform/gettransform/response.go index 21c233b054..bc21348e34 100644 --- a/typedapi/transform/gettransform/response.go +++ b/typedapi/transform/gettransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettransform @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform/GetTransformResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform/GetTransformResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` Transforms []types.TransformSummary `json:"transforms"` diff --git a/typedapi/transform/gettransformstats/get_transform_stats.go b/typedapi/transform/gettransformstats/get_transform_stats.go index e059822239..0aced1126b 100644 --- a/typedapi/transform/gettransformstats/get_transform_stats.go +++ b/typedapi/transform/gettransformstats/get_transform_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves usage information for transforms. 
package gettransformstats diff --git a/typedapi/transform/gettransformstats/response.go b/typedapi/transform/gettransformstats/response.go index 4ee37a4f2e..a94b26da3f 100644 --- a/typedapi/transform/gettransformstats/response.go +++ b/typedapi/transform/gettransformstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package gettransformstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettransformstats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/GetTransformStatsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/GetTransformStatsResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` Transforms []types.TransformStats `json:"transforms"` diff --git a/typedapi/transform/previewtransform/preview_transform.go b/typedapi/transform/previewtransform/preview_transform.go index 7fb9d11cab..6a7d148dae 100644 --- a/typedapi/transform/previewtransform/preview_transform.go +++ b/typedapi/transform/previewtransform/preview_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Previews a transform. package previewtransform diff --git a/typedapi/transform/previewtransform/request.go b/typedapi/transform/previewtransform/request.go index 4d8d5cc0a6..d1b55b42ac 100644 --- a/typedapi/transform/previewtransform/request.go +++ b/typedapi/transform/previewtransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package previewtransform @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package previewtransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/preview_transform/PreviewTransformRequest.ts#L33-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/preview_transform/PreviewTransformRequest.ts#L33-L107 type Request struct { // Description Free text description of the transform. 
@@ -98,7 +98,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,42 +109,42 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "latest": if err := dec.Decode(&s.Latest); err != nil { - return err + return fmt.Errorf("%s | %w", "Latest", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "retention_policy": if err := dec.Decode(&s.RetentionPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionPolicy", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "sync": if err := dec.Decode(&s.Sync); err != nil { - return err + return fmt.Errorf("%s | %w", "Sync", err) } } diff --git a/typedapi/transform/previewtransform/response.go b/typedapi/transform/previewtransform/response.go index 47dd5847f9..4f0b6bd8cd 100644 --- a/typedapi/transform/previewtransform/response.go +++ b/typedapi/transform/previewtransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package previewtransform @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package previewtransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/preview_transform/PreviewTransformResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/preview_transform/PreviewTransformResponse.ts#L22-L27 type Response struct { GeneratedDestIndex types.IndexState `json:"generated_dest_index"` Preview []json.RawMessage `json:"preview"` diff --git a/typedapi/transform/puttransform/put_transform.go b/typedapi/transform/puttransform/put_transform.go index 94bd3929b7..77610e4b25 100644 --- a/typedapi/transform/puttransform/put_transform.go +++ b/typedapi/transform/puttransform/put_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Instantiates a transform. package puttransform diff --git a/typedapi/transform/puttransform/request.go b/typedapi/transform/puttransform/request.go index 867c93612b..3afbadc157 100644 --- a/typedapi/transform/puttransform/request.go +++ b/typedapi/transform/puttransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttransform @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package puttransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/put_transform/PutTransformRequest.ts#L33-L122 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/put_transform/PutTransformRequest.ts#L33-L122 type Request struct { // Description Free text description of the transform. @@ -102,7 +102,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,47 +113,47 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "latest": if err := dec.Decode(&s.Latest); err != nil { - return err + return fmt.Errorf("%s | %w", "Latest", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "retention_policy": if err := dec.Decode(&s.RetentionPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionPolicy", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "sync": if err := dec.Decode(&s.Sync); err != nil { - return err + return fmt.Errorf("%s | %w", "Sync", err) } } diff --git a/typedapi/transform/puttransform/response.go b/typedapi/transform/puttransform/response.go index d8c19999d6..40c11216c3 100644 --- a/typedapi/transform/puttransform/response.go +++ b/typedapi/transform/puttransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package puttransform // Response holds the response body struct for the package puttransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/put_transform/PutTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/put_transform/PutTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/transform/resettransform/reset_transform.go b/typedapi/transform/resettransform/reset_transform.go index df16507be9..359d98051e 100644 --- a/typedapi/transform/resettransform/reset_transform.go +++ b/typedapi/transform/resettransform/reset_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Resets an existing transform. package resettransform diff --git a/typedapi/transform/resettransform/response.go b/typedapi/transform/resettransform/response.go index 29e58329bf..41d382c769 100644 --- a/typedapi/transform/resettransform/response.go +++ b/typedapi/transform/resettransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package resettransform // Response holds the response body struct for the package resettransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/reset_transform/ResetTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/reset_transform/ResetTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/schedulenowtransform/response.go b/typedapi/transform/schedulenowtransform/response.go index cce0f73033..0eaf103294 100644 --- a/typedapi/transform/schedulenowtransform/response.go +++ b/typedapi/transform/schedulenowtransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package schedulenowtransform // Response holds the response body struct for the package schedulenowtransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/schedule_now_transform/ScheduleNowTransformResponse.ts#L21-L23 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/schedule_now_transform/ScheduleNowTransformResponse.ts#L21-L23 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/schedulenowtransform/schedule_now_transform.go b/typedapi/transform/schedulenowtransform/schedule_now_transform.go index 273e71459e..5eac28cc75 100644 --- a/typedapi/transform/schedulenowtransform/schedule_now_transform.go +++ b/typedapi/transform/schedulenowtransform/schedule_now_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Schedules now a transform. package schedulenowtransform diff --git a/typedapi/transform/starttransform/response.go b/typedapi/transform/starttransform/response.go index 85950c1fbd..16ada37b95 100644 --- a/typedapi/transform/starttransform/response.go +++ b/typedapi/transform/starttransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package starttransform // Response holds the response body struct for the package starttransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/start_transform/StartTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/start_transform/StartTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/starttransform/start_transform.go b/typedapi/transform/starttransform/start_transform.go index 9dc7f09fcb..5960f19c18 100644 --- a/typedapi/transform/starttransform/start_transform.go +++ b/typedapi/transform/starttransform/start_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Starts one or more transforms. package starttransform diff --git a/typedapi/transform/stoptransform/response.go b/typedapi/transform/stoptransform/response.go index e13e4781b0..842cf942ee 100644 --- a/typedapi/transform/stoptransform/response.go +++ b/typedapi/transform/stoptransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stoptransform // Response holds the response body struct for the package stoptransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/stop_transform/StopTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/stop_transform/StopTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/stoptransform/stop_transform.go b/typedapi/transform/stoptransform/stop_transform.go index 2573e10d2b..010c5db45c 100644 --- a/typedapi/transform/stoptransform/stop_transform.go +++ b/typedapi/transform/stoptransform/stop_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stops one or more transforms. package stoptransform diff --git a/typedapi/transform/updatetransform/request.go b/typedapi/transform/updatetransform/request.go index 844a051e9e..b2ba63ae6f 100644 --- a/typedapi/transform/updatetransform/request.go +++ b/typedapi/transform/updatetransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatetransform @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package updatetransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/update_transform/UpdateTransformRequest.ts#L31-L105 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/update_transform/UpdateTransformRequest.ts#L31-L105 type Request struct { // Description Free text description of the transform. @@ -93,7 +93,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,37 +104,37 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "retention_policy": if err := dec.Decode(&s.RetentionPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionPolicy", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "sync": if err := dec.Decode(&s.Sync); err != nil { - return err + return fmt.Errorf("%s | %w", "Sync", err) } } diff --git a/typedapi/transform/updatetransform/response.go b/typedapi/transform/updatetransform/response.go index 84ae61bf6c..b9328fa0b1 100644 --- a/typedapi/transform/updatetransform/response.go +++ b/typedapi/transform/updatetransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package updatetransform @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatetransform // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/update_transform/UpdateTransformResponse.ts#L33-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/update_transform/UpdateTransformResponse.ts#L33-L51 type Response struct { Authorization *types.TransformAuthorization `json:"authorization,omitempty"` CreateTime int64 `json:"create_time"` diff --git a/typedapi/transform/updatetransform/update_transform.go b/typedapi/transform/updatetransform/update_transform.go index 8600933391..6702e421c1 100644 --- a/typedapi/transform/updatetransform/update_transform.go +++ b/typedapi/transform/updatetransform/update_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Updates certain properties of a transform. package updatetransform diff --git a/typedapi/transform/upgradetransforms/response.go b/typedapi/transform/upgradetransforms/response.go index 5ab820d9cb..6aa94e1399 100644 --- a/typedapi/transform/upgradetransforms/response.go +++ b/typedapi/transform/upgradetransforms/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package upgradetransforms // Response holds the response body struct for the package upgradetransforms // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/upgrade_transforms/UpgradeTransformsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/upgrade_transforms/UpgradeTransformsResponse.ts#L25-L34 type Response struct { // NeedsUpdate The number of transforms that need to be upgraded. diff --git a/typedapi/transform/upgradetransforms/upgrade_transforms.go b/typedapi/transform/upgradetransforms/upgrade_transforms.go index 99d79cd0d7..daf8dce5cf 100644 --- a/typedapi/transform/upgradetransforms/upgrade_transforms.go +++ b/typedapi/transform/upgradetransforms/upgrade_transforms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Upgrades all transforms. 
package upgradetransforms diff --git a/typedapi/types/acknowledgement.go b/typedapi/types/acknowledgement.go index 31a0ed21d8..3e013bb1f4 100644 --- a/typedapi/types/acknowledgement.go +++ b/typedapi/types/acknowledgement.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Acknowledgement type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/post/types.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/post/types.ts#L20-L23 type Acknowledgement struct { License []string `json:"license"` Message string `json:"message"` @@ -53,13 +54,13 @@ func (s *Acknowledgement) UnmarshalJSON(data []byte) error { case "license": if err := dec.Decode(&s.License); err != nil { - return err + return fmt.Errorf("%s | %w", "License", err) } case "message": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/acknowledgestate.go b/typedapi/types/acknowledgestate.go index 689b1957c4..cd80921bbd 100644 --- a/typedapi/types/acknowledgestate.go +++ b/typedapi/types/acknowledgestate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/acknowledgementoptions" @@ -31,7 +32,7 @@ import ( // AcknowledgeState type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L115-L118 type AcknowledgeState struct { State acknowledgementoptions.AcknowledgementOptions `json:"state"` Timestamp DateTime `json:"timestamp"` @@ -54,12 +55,12 @@ func (s *AcknowledgeState) UnmarshalJSON(data []byte) error { case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/actionstatus.go b/typedapi/types/actionstatus.go index d27c2fd4e2..120be481b2 100644 --- a/typedapi/types/actionstatus.go +++ b/typedapi/types/actionstatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ActionStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L131-L136 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L131-L136 type ActionStatus struct { Ack AcknowledgeState `json:"ack"` LastExecution *ExecutionState `json:"last_execution,omitempty"` diff --git a/typedapi/types/activationstate.go b/typedapi/types/activationstate.go index 1d579a715e..2b03218b6f 100644 --- a/typedapi/types/activationstate.go +++ b/typedapi/types/activationstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ActivationState type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Activation.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Activation.ts#L24-L27 type ActivationState struct { Active bool `json:"active"` Timestamp DateTime `json:"timestamp"` @@ -58,7 +59,7 @@ func (s *ActivationState) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Active", err) } s.Active = value case bool: @@ -67,7 +68,7 @@ func (s *ActivationState) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/activationstatus.go b/typedapi/types/activationstatus.go index e648c564e7..ed323cc2c7 100644 --- a/typedapi/types/activationstatus.go +++ b/typedapi/types/activationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ActivationStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Activation.ts#L29-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Activation.ts#L29-L33 type ActivationStatus struct { Actions WatcherStatusActions `json:"actions"` State ActivationState `json:"state"` @@ -53,17 +54,17 @@ func (s *ActivationStatus) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/adaptiveselection.go b/typedapi/types/adaptiveselection.go index 71065adad3..82ed636c72 100644 --- a/typedapi/types/adaptiveselection.go +++ b/typedapi/types/adaptiveselection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AdaptiveSelection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L403-L432 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L403-L432 type AdaptiveSelection struct { // AvgQueueSize The exponentially weighted moving average queue size of search requests on // the keyed node. 
@@ -76,7 +77,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvgQueueSize", err) } s.AvgQueueSize = &value case float64: @@ -86,7 +87,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case "avg_response_time": if err := dec.Decode(&s.AvgResponseTime); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgResponseTime", err) } case "avg_response_time_ns": @@ -96,7 +97,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvgResponseTimeNs", err) } s.AvgResponseTimeNs = &value case float64: @@ -106,7 +107,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case "avg_service_time": if err := dec.Decode(&s.AvgServiceTime); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgServiceTime", err) } case "avg_service_time_ns": @@ -116,7 +117,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvgServiceTimeNs", err) } s.AvgServiceTimeNs = &value case float64: @@ -131,7 +132,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutgoingSearches", err) } s.OutgoingSearches = &value case float64: @@ -142,7 +143,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { case "rank": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Rank", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/addaction.go b/typedapi/types/addaction.go index 504f2ef2b8..7593dcddc5 100644 --- a/typedapi/types/addaction.go +++ b/typedapi/types/addaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AddAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/update_aliases/types.ts#L41-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/update_aliases/types.ts#L41-L95 type AddAction struct { // Alias Alias for the action. // Index alias names support date math. 
@@ -82,7 +83,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { case "alias": if err := dec.Decode(&s.Alias); err != nil { - return err + return fmt.Errorf("%s | %w", "Alias", err) } case "aliases": @@ -91,29 +92,29 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } s.Aliases = append(s.Aliases, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "index_routing": if err := dec.Decode(&s.IndexRouting); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexRouting", err) } case "indices": @@ -122,13 +123,13 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } @@ -139,7 +140,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsHidden", err) } s.IsHidden = &value case bool: @@ -153,7 +154,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsWriteIndex", err) } s.IsWriteIndex = &value case bool: @@ -167,7 +168,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MustExist", err) } s.MustExist = &value case bool: @@ -176,12 +177,12 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "search_routing": if err := dec.Decode(&s.SearchRouting); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchRouting", err) } } diff --git a/typedapi/types/adjacencymatrixaggregate.go b/typedapi/types/adjacencymatrixaggregate.go index 456cf19b00..f8d93fd457 100644 --- a/typedapi/types/adjacencymatrixaggregate.go +++ b/typedapi/types/adjacencymatrixaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // AdjacencyMatrixAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L573-L575 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L573-L575 type AdjacencyMatrixAggregate struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *AdjacencyMatrixAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]AdjacencyMatrixBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []AdjacencyMatrixBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/adjacencymatrixaggregation.go b/typedapi/types/adjacencymatrixaggregation.go index e1f65e770c..1489644d7f 100644 --- a/typedapi/types/adjacencymatrixaggregation.go +++ b/typedapi/types/adjacencymatrixaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AdjacencyMatrixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L57-L63 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L57-L63 type AdjacencyMatrixAggregation struct { // Filters Filters used to create buckets. // At least one filter is required. @@ -59,18 +60,18 @@ func (s *AdjacencyMatrixAggregation) UnmarshalJSON(data []byte) error { s.Filters = make(map[string]Query, 0) } if err := dec.Decode(&s.Filters); err != nil { - return err + return fmt.Errorf("%s | %w", "Filters", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/adjacencymatrixbucket.go b/typedapi/types/adjacencymatrixbucket.go index d832bf8743..177753bd11 100644 --- a/typedapi/types/adjacencymatrixbucket.go +++ b/typedapi/types/adjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // AdjacencyMatrixBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L577-L579 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L577-L579 type AdjacencyMatrixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -72,7 +72,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,490 +95,490 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := 
NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -588,7 +588,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { } else { 
o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/aggregate.go b/typedapi/types/aggregate.go index cfab47f539..091c3b9b0d 100644 --- a/typedapi/types/aggregate.go +++ b/typedapi/types/aggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -92,5 +92,5 @@ package types // MatrixStatsAggregate // GeoLineAggregate // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L38-L123 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L38-L123 type Aggregate interface{} diff --git a/typedapi/types/aggregatemetricdoubleproperty.go b/typedapi/types/aggregatemetricdoubleproperty.go index 238dd07e53..23a50dc178 100644 --- a/typedapi/types/aggregatemetricdoubleproperty.go +++ b/typedapi/types/aggregatemetricdoubleproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // AggregateMetricDoubleProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/complex.ts#L59-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/complex.ts#L60-L65 type AggregateMetricDoubleProperty struct { DefaultMetric string `json:"default_metric"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -65,7 +66,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { case "default_metric": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultMetric", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -394,7 +395,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -407,12 +408,12 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } case "properties": @@ -724,12 +725,12 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/aggregateorder.go b/typedapi/types/aggregateorder.go index 51a82996ef..c22c815802 100644 --- a/typedapi/types/aggregateorder.go +++ b/typedapi/types/aggregateorder.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]sortorder.SortOrder // []map[string]sortorder.SortOrder // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L976-L978 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L976-L978 type AggregateOrder interface{} diff --git a/typedapi/types/aggregateoutput.go b/typedapi/types/aggregateoutput.go index 40d21f2889..024efe27a9 100644 --- a/typedapi/types/aggregateoutput.go +++ b/typedapi/types/aggregateoutput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // AggregateOutput type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L101-L106 type AggregateOutput struct { Exponent *Weights `json:"exponent,omitempty"` LogisticRegression *Weights `json:"logistic_regression,omitempty"` diff --git a/typedapi/types/aggregation.go b/typedapi/types/aggregation.go index abff351896..e230fa4b8f 100644 --- a/typedapi/types/aggregation.go +++ b/typedapi/types/aggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Aggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregation.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregation.ts#L22-L25 type Aggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -53,13 +54,13 @@ func (s *Aggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/aggregationbreakdown.go b/typedapi/types/aggregationbreakdown.go index 4aa7e99425..32629f5ff7 100644 --- a/typedapi/types/aggregationbreakdown.go +++ b/typedapi/types/aggregationbreakdown.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AggregationBreakdown type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L23-L36 type AggregationBreakdown struct { BuildAggregation int64 `json:"build_aggregation"` BuildAggregationCount int64 `json:"build_aggregation_count"` @@ -68,7 +69,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildAggregation", err) } s.BuildAggregation = value case float64: @@ -83,7 +84,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildAggregationCount", err) } s.BuildAggregationCount = value case float64: @@ -98,7 +99,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildLeafCollector", err) } s.BuildLeafCollector = value case float64: @@ -113,7 +114,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildLeafCollectorCount", err) } s.BuildLeafCollectorCount = value case float64: @@ -128,7 +129,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Collect", err) } s.Collect = value case float64: @@ -143,7 +144,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollectCount", err) } s.CollectCount = value case float64: @@ -158,7 +159,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Initialize", err) } s.Initialize = value case float64: @@ -173,7 +174,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitializeCount", err) } s.InitializeCount = value case float64: @@ -188,7 +189,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PostCollection", err) } s.PostCollection = &value case float64: @@ -203,7 +204,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PostCollectionCount", err) } s.PostCollectionCount = &value case float64: @@ -218,7 +219,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Reduce", err) } s.Reduce = value case float64: @@ -233,7 +234,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - 
return err + return fmt.Errorf("%s | %w", "ReduceCount", err) } s.ReduceCount = value case float64: diff --git a/typedapi/types/aggregationprofile.go b/typedapi/types/aggregationprofile.go index 78f105fc34..a2df453104 100644 --- a/typedapi/types/aggregationprofile.go +++ b/typedapi/types/aggregationprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AggregationProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L77-L84 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L77-L84 type AggregationProfile struct { Breakdown AggregationBreakdown `json:"breakdown"` Children []AggregationProfile `json:"children,omitempty"` @@ -57,23 +58,23 @@ func (s *AggregationProfile) UnmarshalJSON(data []byte) error { case "breakdown": if err := dec.Decode(&s.Breakdown); err != nil { - return err + return fmt.Errorf("%s | %w", "Breakdown", err) } case "children": if err := dec.Decode(&s.Children); err != nil { - return err + return fmt.Errorf("%s | %w", "Children", err) } case "debug": if err := dec.Decode(&s.Debug); err != nil { - return err + return fmt.Errorf("%s | %w", "Debug", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,13 +85,13 @@ func (s *AggregationProfile) UnmarshalJSON(data []byte) error { case "time_in_nanos": if err := dec.Decode(&s.TimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInNanos", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/aggregationprofiledebug.go b/typedapi/types/aggregationprofiledebug.go index 21636acbaa..fe1eafddbe 100644 --- a/typedapi/types/aggregationprofiledebug.go +++ b/typedapi/types/aggregationprofiledebug.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AggregationProfileDebug type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L39-L68 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L39-L68 type AggregationProfileDebug struct { BuiltBuckets *int `json:"built_buckets,omitempty"` CharsFetched *int `json:"chars_fetched,omitempty"` @@ -85,7 +86,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuiltBuckets", err) } s.BuiltBuckets = &value case float64: @@ -101,7 +102,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CharsFetched", err) } s.CharsFetched = &value case float64: @@ -117,7 +118,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollectAnalyzedCount", err) } s.CollectAnalyzedCount = &value case float64: @@ -133,7 +134,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollectAnalyzedNs", err) } s.CollectAnalyzedNs = &value case float64: @@ -144,7 +145,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "collection_strategy": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CollectionStrategy", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -155,13 +156,13 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "deferred_aggregators": if err := dec.Decode(&s.DeferredAggregators); err != nil { - return err + return fmt.Errorf("%s | %w", "DeferredAggregators", err) } case "delegate": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Delegate", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -172,7 +173,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "delegate_debug": if err := dec.Decode(&s.DelegateDebug); err != nil { - return err + return fmt.Errorf("%s | %w", "DelegateDebug", err) } case "empty_collectors_used": @@ -183,7 +184,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EmptyCollectorsUsed", err) } s.EmptyCollectorsUsed = &value case float64: @@ -199,7 +200,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExtractCount", err) } s.ExtractCount = &value case float64: @@ -215,7 +216,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExtractNs", err) } s.ExtractNs = &value case float64: @@ -225,7 +226,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "filters": if err := dec.Decode(&s.Filters); err != nil { - return err + return fmt.Errorf("%s | %w", "Filters", err) } case "has_filter": @@ -235,7 +236,7 @@ func (s 
*AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HasFilter", err) } s.HasFilter = &value case bool: @@ -245,7 +246,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "map_reducer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MapReducer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -262,7 +263,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumericCollectorsUsed", err) } s.NumericCollectorsUsed = &value case float64: @@ -278,7 +279,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OrdinalsCollectorsOverheadTooHigh", err) } s.OrdinalsCollectorsOverheadTooHigh = &value case float64: @@ -294,7 +295,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OrdinalsCollectorsUsed", err) } s.OrdinalsCollectorsUsed = &value case float64: @@ -305,7 +306,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "result_strategy": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultStrategy", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -322,7 +323,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsCollected", err) } s.SegmentsCollected = &value case float64: @@ -338,7 +339,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsCounted", err) } s.SegmentsCounted = &value case float64: @@ -354,7 +355,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsWithDeletedDocs", err) } s.SegmentsWithDeletedDocs = &value case float64: @@ -370,7 +371,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsWithDocCountField", err) } s.SegmentsWithDocCountField = &value case float64: @@ -386,7 +387,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsWithMultiValuedOrds", err) } s.SegmentsWithMultiValuedOrds = &value case float64: @@ -402,7 +403,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsWithSingleValuedOrds", err) } s.SegmentsWithSingleValuedOrds = &value case float64: @@ -418,7 +419,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StringHashingCollectorsUsed", err) } s.StringHashingCollectorsUsed = &value 
case float64: @@ -434,7 +435,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SurvivingBuckets", err) } s.SurvivingBuckets = &value case float64: @@ -450,7 +451,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalBuckets", err) } s.TotalBuckets = &value case float64: @@ -466,7 +467,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ValuesFetched", err) } s.ValuesFetched = &value case float64: diff --git a/typedapi/types/aggregationprofiledelegatedebugfilter.go b/typedapi/types/aggregationprofiledelegatedebugfilter.go index 4bd61d7254..7fa169aedd 100644 --- a/typedapi/types/aggregationprofiledelegatedebugfilter.go +++ b/typedapi/types/aggregationprofiledelegatedebugfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AggregationProfileDelegateDebugFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L70-L75 type AggregationProfileDelegateDebugFilter struct { Query *string `json:"query,omitempty"` ResultsFromMetadata *int `json:"results_from_metadata,omitempty"` @@ -56,7 +57,7 @@ func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsFromMetadata", err) } s.ResultsFromMetadata = &value case float64: @@ -89,7 +90,7 @@ func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsCountedInConstantTime", err) } s.SegmentsCountedInConstantTime = &value case float64: @@ -100,7 +101,7 @@ func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error case "specialized_for": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SpecializedFor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/aggregationrange.go b/typedapi/types/aggregationrange.go index b26b70e6ea..cf7b459e6e 100644 --- a/typedapi/types/aggregationrange.go +++ b/typedapi/types/aggregationrange.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
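Every hunk in these generated types applies the same mechanical change: a bare return err inside UnmarshalJSON becomes return fmt.Errorf("%s | %w", "<GoFieldName>", err), and each touched file gains an "fmt" import to match. The snippet below is an illustrative sketch, not part of this diff; it borrows the BuiltBuckets field name from AggregationProfileDebug to show that the prefix is purely additive: because %w is used, the underlying error stays reachable with errors.Is and errors.As.

package main

import (
	"errors"
	"fmt"
	"strconv"
)

func main() {
	// The generated UnmarshalJSON methods now wrap every decode failure
	// with the name of the Go field being decoded, keeping the cause via %w.
	_, cause := strconv.Atoi("not-a-number")
	err := fmt.Errorf("%s | %w", "BuiltBuckets", cause)

	fmt.Println(err)
	// BuiltBuckets | strconv.Atoi: parsing "not-a-number": invalid syntax

	// Standard error inspection keeps working on the wrapped value.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr))           // true
	fmt.Println(errors.Is(err, strconv.ErrSyntax)) // true
}

Matching on the typed cause this way, rather than parsing the "Field | ..." string, keeps calling code stable even if the prefix format changes in a later regeneration.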
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AggregationRange type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L672-L685 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L672-L685 type AggregationRange struct { // From Start of the range (inclusive). From string `json:"from,omitempty"` @@ -58,7 +59,7 @@ func (s *AggregationRange) UnmarshalJSON(data []byte) error { case "from": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *AggregationRange) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *AggregationRange) UnmarshalJSON(data []byte) error { case "to": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/aggregations.go b/typedapi/types/aggregations.go index f44893a616..1ec1565643 100644 --- a/typedapi/types/aggregations.go +++ b/typedapi/types/aggregations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Aggregations type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/AggregationContainer.ts#L106-L515 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/AggregationContainer.ts#L106-L515 type Aggregations struct { // AdjacencyMatrix A bucket aggregation returning a form of adjacency matrix. 
// The request provides a collection of named filter expressions, similar to the @@ -303,7 +304,7 @@ func (s *Aggregations) UnmarshalJSON(data []byte) error { case "adjacency_matrix": if err := dec.Decode(&s.AdjacencyMatrix); err != nil { - return err + return fmt.Errorf("%s | %w", "AdjacencyMatrix", err) } case "aggregations", "aggs": @@ -311,232 +312,232 @@ func (s *Aggregations) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "auto_date_histogram": if err := dec.Decode(&s.AutoDateHistogram); err != nil { - return err + return fmt.Errorf("%s | %w", "AutoDateHistogram", err) } case "avg": if err := dec.Decode(&s.Avg); err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } case "avg_bucket": if err := dec.Decode(&s.AvgBucket); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgBucket", err) } case "boxplot": if err := dec.Decode(&s.Boxplot); err != nil { - return err + return fmt.Errorf("%s | %w", "Boxplot", err) } case "bucket_correlation": if err := dec.Decode(&s.BucketCorrelation); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketCorrelation", err) } case "bucket_count_ks_test": if err := dec.Decode(&s.BucketCountKsTest); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketCountKsTest", err) } case "bucket_script": if err := dec.Decode(&s.BucketScript); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketScript", err) } case "bucket_selector": if err := dec.Decode(&s.BucketSelector); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSelector", err) } case "bucket_sort": if err := dec.Decode(&s.BucketSort); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSort", err) } case "cardinality": if err := dec.Decode(&s.Cardinality); err != nil { - return err + return fmt.Errorf("%s | %w", "Cardinality", err) } case "categorize_text": if err := dec.Decode(&s.CategorizeText); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizeText", err) } case "children": if err := dec.Decode(&s.Children); err != nil { - return err + return fmt.Errorf("%s | %w", "Children", err) } case "composite": if err := dec.Decode(&s.Composite); err != nil { - return err + return fmt.Errorf("%s | %w", "Composite", err) } case "cumulative_cardinality": if err := dec.Decode(&s.CumulativeCardinality); err != nil { - return err + return fmt.Errorf("%s | %w", "CumulativeCardinality", err) } case "cumulative_sum": if err := dec.Decode(&s.CumulativeSum); err != nil { - return err + return fmt.Errorf("%s | %w", "CumulativeSum", err) } case "date_histogram": if err := dec.Decode(&s.DateHistogram); err != nil { - return err + return fmt.Errorf("%s | %w", "DateHistogram", err) } case "date_range": if err := dec.Decode(&s.DateRange); err != nil { - return err + return fmt.Errorf("%s | %w", "DateRange", err) } case "derivative": if err := dec.Decode(&s.Derivative); err != nil { - return err + return fmt.Errorf("%s | %w", "Derivative", err) } case "diversified_sampler": if err := dec.Decode(&s.DiversifiedSampler); err != nil { - return err + return fmt.Errorf("%s | %w", "DiversifiedSampler", err) } case "extended_stats": if err := dec.Decode(&s.ExtendedStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ExtendedStats", err) } case "extended_stats_bucket": if err := dec.Decode(&s.ExtendedStatsBucket); err != nil { - return err + return fmt.Errorf("%s | 
%w", "ExtendedStatsBucket", err) } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "filters": if err := dec.Decode(&s.Filters); err != nil { - return err + return fmt.Errorf("%s | %w", "Filters", err) } case "frequent_item_sets": if err := dec.Decode(&s.FrequentItemSets); err != nil { - return err + return fmt.Errorf("%s | %w", "FrequentItemSets", err) } case "geo_bounds": if err := dec.Decode(&s.GeoBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoBounds", err) } case "geo_centroid": if err := dec.Decode(&s.GeoCentroid); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoCentroid", err) } case "geo_distance": if err := dec.Decode(&s.GeoDistance); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoDistance", err) } case "geo_line": if err := dec.Decode(&s.GeoLine); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoLine", err) } case "geohash_grid": if err := dec.Decode(&s.GeohashGrid); err != nil { - return err + return fmt.Errorf("%s | %w", "GeohashGrid", err) } case "geohex_grid": if err := dec.Decode(&s.GeohexGrid); err != nil { - return err + return fmt.Errorf("%s | %w", "GeohexGrid", err) } case "geotile_grid": if err := dec.Decode(&s.GeotileGrid); err != nil { - return err + return fmt.Errorf("%s | %w", "GeotileGrid", err) } case "global": if err := dec.Decode(&s.Global); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } case "histogram": if err := dec.Decode(&s.Histogram); err != nil { - return err + return fmt.Errorf("%s | %w", "Histogram", err) } case "inference": if err := dec.Decode(&s.Inference); err != nil { - return err + return fmt.Errorf("%s | %w", "Inference", err) } case "ip_prefix": if err := dec.Decode(&s.IpPrefix); err != nil { - return err + return fmt.Errorf("%s | %w", "IpPrefix", err) } case "ip_range": if err := dec.Decode(&s.IpRange); err != nil { - return err + return fmt.Errorf("%s | %w", "IpRange", err) } case "line": if err := dec.Decode(&s.Line); err != nil { - return err + return fmt.Errorf("%s | %w", "Line", err) } case "matrix_stats": if err := dec.Decode(&s.MatrixStats); err != nil { - return err + return fmt.Errorf("%s | %w", "MatrixStats", err) } case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "max_bucket": if err := dec.Decode(&s.MaxBucket); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxBucket", err) } case "median_absolute_deviation": if err := dec.Decode(&s.MedianAbsoluteDeviation); err != nil { - return err + return fmt.Errorf("%s | %w", "MedianAbsoluteDeviation", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min": if err := dec.Decode(&s.Min); err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } case "min_bucket": if err := dec.Decode(&s.MinBucket); err != nil { - return err + return fmt.Errorf("%s | %w", "MinBucket", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "moving_avg": @@ -589,152 +590,152 @@ func (s *Aggregations) UnmarshalJSON(data []byte) error { case "moving_fn": if err := dec.Decode(&s.MovingFn); err != nil { - return err + return fmt.Errorf("%s | %w", "MovingFn", err) } case "moving_percentiles": if err := dec.Decode(&s.MovingPercentiles); err != nil { - return err + return fmt.Errorf("%s | %w", 
"MovingPercentiles", err) } case "multi_terms": if err := dec.Decode(&s.MultiTerms); err != nil { - return err + return fmt.Errorf("%s | %w", "MultiTerms", err) } case "nested": if err := dec.Decode(&s.Nested); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested", err) } case "normalize": if err := dec.Decode(&s.Normalize); err != nil { - return err + return fmt.Errorf("%s | %w", "Normalize", err) } case "parent": if err := dec.Decode(&s.Parent); err != nil { - return err + return fmt.Errorf("%s | %w", "Parent", err) } case "percentile_ranks": if err := dec.Decode(&s.PercentileRanks); err != nil { - return err + return fmt.Errorf("%s | %w", "PercentileRanks", err) } case "percentiles": if err := dec.Decode(&s.Percentiles); err != nil { - return err + return fmt.Errorf("%s | %w", "Percentiles", err) } case "percentiles_bucket": if err := dec.Decode(&s.PercentilesBucket); err != nil { - return err + return fmt.Errorf("%s | %w", "PercentilesBucket", err) } case "range": if err := dec.Decode(&s.Range); err != nil { - return err + return fmt.Errorf("%s | %w", "Range", err) } case "rare_terms": if err := dec.Decode(&s.RareTerms); err != nil { - return err + return fmt.Errorf("%s | %w", "RareTerms", err) } case "rate": if err := dec.Decode(&s.Rate); err != nil { - return err + return fmt.Errorf("%s | %w", "Rate", err) } case "reverse_nested": if err := dec.Decode(&s.ReverseNested); err != nil { - return err + return fmt.Errorf("%s | %w", "ReverseNested", err) } case "sampler": if err := dec.Decode(&s.Sampler); err != nil { - return err + return fmt.Errorf("%s | %w", "Sampler", err) } case "scripted_metric": if err := dec.Decode(&s.ScriptedMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptedMetric", err) } case "serial_diff": if err := dec.Decode(&s.SerialDiff); err != nil { - return err + return fmt.Errorf("%s | %w", "SerialDiff", err) } case "significant_terms": if err := dec.Decode(&s.SignificantTerms); err != nil { - return err + return fmt.Errorf("%s | %w", "SignificantTerms", err) } case "significant_text": if err := dec.Decode(&s.SignificantText); err != nil { - return err + return fmt.Errorf("%s | %w", "SignificantText", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "stats_bucket": if err := dec.Decode(&s.StatsBucket); err != nil { - return err + return fmt.Errorf("%s | %w", "StatsBucket", err) } case "string_stats": if err := dec.Decode(&s.StringStats); err != nil { - return err + return fmt.Errorf("%s | %w", "StringStats", err) } case "sum": if err := dec.Decode(&s.Sum); err != nil { - return err + return fmt.Errorf("%s | %w", "Sum", err) } case "sum_bucket": if err := dec.Decode(&s.SumBucket); err != nil { - return err + return fmt.Errorf("%s | %w", "SumBucket", err) } case "t_test": if err := dec.Decode(&s.TTest); err != nil { - return err + return fmt.Errorf("%s | %w", "TTest", err) } case "terms": if err := dec.Decode(&s.Terms); err != nil { - return err + return fmt.Errorf("%s | %w", "Terms", err) } case "top_hits": if err := dec.Decode(&s.TopHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TopHits", err) } case "top_metrics": if err := dec.Decode(&s.TopMetrics); err != nil { - return err + return fmt.Errorf("%s | %w", "TopMetrics", err) } case "value_count": if err := dec.Decode(&s.ValueCount); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueCount", err) } case "variable_width_histogram": if err := 
dec.Decode(&s.VariableWidthHistogram); err != nil { - return err + return fmt.Errorf("%s | %w", "VariableWidthHistogram", err) } case "weighted_avg": if err := dec.Decode(&s.WeightedAvg); err != nil { - return err + return fmt.Errorf("%s | %w", "WeightedAvg", err) } } diff --git a/typedapi/types/alias.go b/typedapi/types/alias.go index 795ef7a6d0..9778c25ba6 100644 --- a/typedapi/types/alias.go +++ b/typedapi/types/alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Alias type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/Alias.ts#L23-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/Alias.ts#L23-L53 type Alias struct { // Filter Query used to limit documents the alias can access. Filter *Query `json:"filter,omitempty"` @@ -66,12 +67,12 @@ func (s *Alias) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "index_routing": if err := dec.Decode(&s.IndexRouting); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexRouting", err) } case "is_hidden": @@ -81,7 +82,7 @@ func (s *Alias) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsHidden", err) } s.IsHidden = &value case bool: @@ -95,7 +96,7 @@ func (s *Alias) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsWriteIndex", err) } s.IsWriteIndex = &value case bool: @@ -104,12 +105,12 @@ func (s *Alias) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "search_routing": if err := dec.Decode(&s.SearchRouting); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchRouting", err) } } diff --git a/typedapi/types/aliasdefinition.go b/typedapi/types/aliasdefinition.go index a70e634609..352681b892 100644 --- a/typedapi/types/aliasdefinition.go +++ b/typedapi/types/aliasdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AliasDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/AliasDefinition.ts#L22-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/AliasDefinition.ts#L22-L54 type AliasDefinition struct { // Filter Query used to limit documents the alias can access. 
Filter *Query `json:"filter,omitempty"` @@ -66,13 +67,13 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "index_routing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexRouting", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsHidden", err) } s.IsHidden = &value case bool: @@ -102,7 +103,7 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsWriteIndex", err) } s.IsWriteIndex = &value case bool: @@ -112,7 +113,7 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { case "routing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { case "search_routing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchRouting", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/aliasesrecord.go b/typedapi/types/aliasesrecord.go index 627196f3c4..ed90394880 100644 --- a/typedapi/types/aliasesrecord.go +++ b/typedapi/types/aliasesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AliasesRecord type. 
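Because container types such as Aggregations decode their children recursively (the aggregations / aggs case above re-enters the same UnmarshalJSON for every nested entry), each level prepends its own field name, so a deeply nested decode failure now reads as a breadcrumb such as Aggregations | Terms | ... | strconv.Atoi: ... instead of a bare parse error. The sketch below shows how the prefixes stack and how the chain unwraps back to the root cause; the outer and inner types and their unmarshal helpers are hypothetical stand-ins, not the generated methods.

package main

import (
	"errors"
	"fmt"
	"strconv"
)

type inner struct{ Count int }

func (s *inner) unmarshal(v string) error {
	n, err := strconv.Atoi(v)
	if err != nil {
		// Innermost field prepends its name, same convention as the diff.
		return fmt.Errorf("%s | %w", "Count", err)
	}
	s.Count = n
	return nil
}

type outer struct{ Terms inner }

func (s *outer) unmarshal(v string) error {
	if err := s.Terms.unmarshal(v); err != nil {
		// Each enclosing level adds its own field name in front.
		return fmt.Errorf("%s | %w", "Terms", err)
	}
	return nil
}

func main() {
	var o outer
	err := o.unmarshal("oops")
	fmt.Println(err)
	// Terms | Count | strconv.Atoi: parsing "oops": invalid syntax

	// Walking the %w chain still reaches every layer, down to the root cause.
	for e := errors.Unwrap(err); e != nil; e = errors.Unwrap(e) {
		fmt.Println(e)
	}
	// Count | strconv.Atoi: parsing "oops": invalid syntax
	// strconv.Atoi: parsing "oops": invalid syntax
	// invalid syntax
}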
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/aliases/types.ts#L22-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/aliases/types.ts#L22-L53 type AliasesRecord struct { // Alias alias name Alias *string `json:"alias,omitempty"` @@ -64,7 +65,7 @@ func (s *AliasesRecord) UnmarshalJSON(data []byte) error { case "alias", "a": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Alias", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *AliasesRecord) UnmarshalJSON(data []byte) error { case "filter", "f", "fi": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,13 +88,13 @@ func (s *AliasesRecord) UnmarshalJSON(data []byte) error { case "index", "i", "idx": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "is_write_index", "w", "isWriteIndex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IsWriteIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *AliasesRecord) UnmarshalJSON(data []byte) error { case "routing.index", "ri", "routingIndex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *AliasesRecord) UnmarshalJSON(data []byte) error { case "routing.search", "rs", "routingSearch": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingSearch", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/allfield.go b/typedapi/types/allfield.go index 029287c5ed..c5605061b3 100644 --- a/typedapi/types/allfield.go +++ b/typedapi/types/allfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AllField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L29-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L29-L40 type AllField struct { Analyzer string `json:"analyzer"` Enabled bool `json:"enabled"` @@ -62,7 +63,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -92,7 +93,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OmitNorms", err) } s.OmitNorms = value case bool: @@ -102,7 +103,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case "search_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -114,7 +115,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = value case bool: @@ -144,7 +145,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StoreTermVectorOffsets", err) } s.StoreTermVectorOffsets = value case bool: @@ -158,7 +159,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StoreTermVectorPayloads", err) } s.StoreTermVectorPayloads = value case bool: @@ -172,7 +173,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StoreTermVectorPositions", err) } s.StoreTermVectorPositions = value case bool: @@ -186,7 +187,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StoreTermVectors", err) } s.StoreTermVectors = value case bool: diff --git a/typedapi/types/allocationdecision.go b/typedapi/types/allocationdecision.go index 6215228f0f..4f5b10e85e 100644 --- a/typedapi/types/allocationdecision.go +++ b/typedapi/types/allocationdecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
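The hunks above also show the decoding convention that this wrapping now annotates: scalar fields are tolerant of both native JSON values and quoted strings, with numbers going through strconv.Atoi or strconv.ParseInt and booleans through strconv.ParseBool, while plain string fields are read as json.RawMessage and passed through strconv.Unquote. The following runnable sketch reproduces only the string-or-native part under the same error-wrapping convention; the sketch type is hypothetical, borrows the built_buckets and has_filter field names from AggregationProfileDebug, and is deliberately simpler than the generated token-based decoder.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// sketch accepts built_buckets and has_filter either as native JSON
// values or as quoted strings, mirroring the generated pattern.
type sketch struct {
	BuiltBuckets *int
	HasFilter    *bool
}

func (s *sketch) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}

	if m, ok := raw["built_buckets"]; ok {
		var v interface{}
		if err := json.Unmarshal(m, &v); err != nil {
			return fmt.Errorf("%s | %w", "BuiltBuckets", err)
		}
		switch t := v.(type) {
		case string: // "42"
			n, err := strconv.Atoi(t)
			if err != nil {
				return fmt.Errorf("%s | %w", "BuiltBuckets", err)
			}
			s.BuiltBuckets = &n
		case float64: // 42
			n := int(t)
			s.BuiltBuckets = &n
		}
	}

	if m, ok := raw["has_filter"]; ok {
		var v interface{}
		if err := json.Unmarshal(m, &v); err != nil {
			return fmt.Errorf("%s | %w", "HasFilter", err)
		}
		switch t := v.(type) {
		case string: // "true"
			b, err := strconv.ParseBool(t)
			if err != nil {
				return fmt.Errorf("%s | %w", "HasFilter", err)
			}
			s.HasFilter = &b
		case bool: // true
			b := t
			s.HasFilter = &b
		}
	}
	return nil
}

func main() {
	var fromStrings, fromNative sketch

	if err := json.Unmarshal([]byte(`{"built_buckets":"7","has_filter":"true"}`), &fromStrings); err != nil {
		panic(err)
	}
	fmt.Println(*fromStrings.BuiltBuckets, *fromStrings.HasFilter) // 7 true

	if err := json.Unmarshal([]byte(`{"built_buckets":7,"has_filter":true}`), &fromNative); err != nil {
		panic(err)
	}
	fmt.Println(*fromNative.BuiltBuckets, *fromNative.HasFilter) // 7 true
}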
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // AllocationDecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L26-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L26-L30 type AllocationDecision struct { Decider string `json:"decider"` Decision allocationexplaindecision.AllocationExplainDecision `json:"decision"` @@ -57,7 +58,7 @@ func (s *AllocationDecision) UnmarshalJSON(data []byte) error { case "decider": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Decider", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,13 +69,13 @@ func (s *AllocationDecision) UnmarshalJSON(data []byte) error { case "decision": if err := dec.Decode(&s.Decision); err != nil { - return err + return fmt.Errorf("%s | %w", "Decision", err) } case "explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Explanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/allocationrecord.go b/typedapi/types/allocationrecord.go index 68e5db5e6e..ff6ed0007b 100644 --- a/typedapi/types/allocationrecord.go +++ b/typedapi/types/allocationrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AllocationRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/allocation/types.ts#L24-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/allocation/types.ts#L24-L75 type AllocationRecord struct { // DiskAvail Free disk space available to Elasticsearch. // Elasticsearch retrieves this metric from the node’s operating system. 
@@ -82,43 +83,43 @@ func (s *AllocationRecord) UnmarshalJSON(data []byte) error { case "disk.avail", "da", "diskAvail": if err := dec.Decode(&s.DiskAvail); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskAvail", err) } case "disk.indices", "di", "diskIndices": if err := dec.Decode(&s.DiskIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskIndices", err) } case "disk.percent", "dp", "diskPercent": if err := dec.Decode(&s.DiskPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskPercent", err) } case "disk.total", "dt", "diskTotal": if err := dec.Decode(&s.DiskTotal); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskTotal", err) } case "disk.used", "du", "diskUsed": if err := dec.Decode(&s.DiskUsed); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskUsed", err) } case "host", "h": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "node", "n": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *AllocationRecord) UnmarshalJSON(data []byte) error { case "shards", "s": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/allocationstore.go b/typedapi/types/allocationstore.go index ac45a1ece2..2ea66bb0e5 100644 --- a/typedapi/types/allocationstore.go +++ b/typedapi/types/allocationstore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AllocationStore type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L39-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L39-L46 type AllocationStore struct { AllocationId string `json:"allocation_id"` Found bool `json:"found"` @@ -58,7 +59,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { case "allocation_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AllocationId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Found", err) } s.Found = value case bool: @@ -88,7 +89,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InSync", err) } s.InSync = value case bool: @@ -102,7 +103,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MatchingSizeInBytes", err) } s.MatchingSizeInBytes = value case float64: @@ -117,7 +118,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MatchingSyncId", err) } s.MatchingSyncId = value case bool: @@ -127,7 +128,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { case "store_exception": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StoreException", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/alwayscondition.go b/typedapi/types/alwayscondition.go index 61f68ac0d2..a50420138d 100644 --- a/typedapi/types/alwayscondition.go +++ b/typedapi/types/alwayscondition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // AlwaysCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L25-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L25-L25 type AlwaysCondition struct { } diff --git a/typedapi/types/analysisconfig.go b/typedapi/types/analysisconfig.go index 6da6ef7159..830a006dd2 100644 --- a/typedapi/types/analysisconfig.go +++ b/typedapi/types/analysisconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalysisConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L29-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L29-L77 type AnalysisConfig struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. This value should be either a whole number of days or @@ -129,7 +130,7 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "categorization_analyzer": @@ -148,38 +149,38 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.CategorizationAnalyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationAnalyzer", err) } } case "categorization_field_name": if err := dec.Decode(&s.CategorizationFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationFieldName", err) } case "categorization_filters": if err := dec.Decode(&s.CategorizationFilters); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationFilters", err) } case "detectors": if err := dec.Decode(&s.Detectors); err != nil { - return err + return fmt.Errorf("%s | %w", "Detectors", err) } case "influencers": if err := dec.Decode(&s.Influencers); err != nil { - return err + return fmt.Errorf("%s | %w", "Influencers", err) } case "latency": if err := dec.Decode(&s.Latency); err != nil { - return err + return fmt.Errorf("%s | %w", "Latency", err) } case "model_prune_window": if err := dec.Decode(&s.ModelPruneWindow); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPruneWindow", err) } case "multivariate_by_fields": @@ -189,7 +190,7 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MultivariateByFields", err) } s.MultivariateByFields = &value case bool: @@ -198,12 +199,12 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { case "per_partition_categorization": if err := dec.Decode(&s.PerPartitionCategorization); err != nil { - return err + return fmt.Errorf("%s | %w", "PerPartitionCategorization", err) } case "summary_count_field_name": if err := dec.Decode(&s.SummaryCountFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "SummaryCountFieldName", err) } } diff --git a/typedapi/types/analysisconfigread.go b/typedapi/types/analysisconfigread.go index f8272cd5eb..d0ec5ff5be 100644 --- a/typedapi/types/analysisconfigread.go +++ b/typedapi/types/analysisconfigread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalysisConfigRead type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L79-L148 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L79-L148 type AnalysisConfigRead struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. @@ -116,7 +117,7 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "categorization_analyzer": @@ -135,38 +136,38 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.CategorizationAnalyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationAnalyzer", err) } } case "categorization_field_name": if err := dec.Decode(&s.CategorizationFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationFieldName", err) } case "categorization_filters": if err := dec.Decode(&s.CategorizationFilters); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationFilters", err) } case "detectors": if err := dec.Decode(&s.Detectors); err != nil { - return err + return fmt.Errorf("%s | %w", "Detectors", err) } case "influencers": if err := dec.Decode(&s.Influencers); err != nil { - return err + return fmt.Errorf("%s | %w", "Influencers", err) } case "latency": if err := dec.Decode(&s.Latency); err != nil { - return err + return fmt.Errorf("%s | %w", "Latency", err) } case "model_prune_window": if err := dec.Decode(&s.ModelPruneWindow); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPruneWindow", err) } case "multivariate_by_fields": @@ -176,7 +177,7 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MultivariateByFields", err) } s.MultivariateByFields = &value case bool: @@ -185,12 +186,12 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { case "per_partition_categorization": if err := dec.Decode(&s.PerPartitionCategorization); err != nil { - return err + return fmt.Errorf("%s | %w", "PerPartitionCategorization", err) } case "summary_count_field_name": if err := dec.Decode(&s.SummaryCountFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "SummaryCountFieldName", err) } } diff --git a/typedapi/types/analysislimits.go b/typedapi/types/analysislimits.go index bd4467a817..db15bac22c 100644 --- a/typedapi/types/analysislimits.go +++ b/typedapi/types/analysislimits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalysisLimits type. 
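One field in AnalysisConfig and AnalysisConfigRead is not a plain scalar: categorization_analyzer may arrive either as a bare analyzer name or as a full analyzer object, which is why the generated code hands it to a local decoder inside a switch and wraps any failure as "CategorizationAnalyzer | ...". The stand-in below is a hedged illustration of that string-or-object union; the analyzerUnion type is hypothetical and dispatches on the first byte of the raw value instead of using a second json.Decoder, which is a simplification of what the generated code does.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// analyzerUnion is a hypothetical stand-in for the CategorizationAnalyzer
// union: the JSON value is either a bare analyzer name ("standard") or an
// object holding an inline analyzer definition.
type analyzerUnion struct {
	Name       string
	Definition map[string]interface{}
}

func (a *analyzerUnion) UnmarshalJSON(data []byte) error {
	data = bytes.TrimSpace(data)
	if len(data) > 0 && data[0] == '"' {
		// String form: just the analyzer name.
		if err := json.Unmarshal(data, &a.Name); err != nil {
			return fmt.Errorf("%s | %w", "CategorizationAnalyzer", err)
		}
		return nil
	}
	// Object form: an inline analyzer definition.
	if err := json.Unmarshal(data, &a.Definition); err != nil {
		return fmt.Errorf("%s | %w", "CategorizationAnalyzer", err)
	}
	return nil
}

func main() {
	var byName, byDefinition analyzerUnion

	if err := json.Unmarshal([]byte(`"standard"`), &byName); err != nil {
		panic(err)
	}
	fmt.Println(byName.Name) // standard

	if err := json.Unmarshal([]byte(`{"tokenizer":"standard","filter":["lowercase"]}`), &byDefinition); err != nil {
		panic(err)
	}
	fmt.Println(byDefinition.Definition["tokenizer"]) // standard
}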
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L161-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L161-L172 type AnalysisLimits struct { // CategorizationExamplesLimit The maximum number of examples stored per category in memory and in the // results data store. If you increase this value, more examples are available, @@ -79,7 +80,7 @@ func (s *AnalysisLimits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationExamplesLimit", err) } s.CategorizationExamplesLimit = &value case float64: @@ -90,7 +91,7 @@ func (s *AnalysisLimits) UnmarshalJSON(data []byte) error { case "model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/analysismemorylimit.go b/typedapi/types/analysismemorylimit.go index 0a4a637418..56d425987d 100644 --- a/typedapi/types/analysismemorylimit.go +++ b/typedapi/types/analysismemorylimit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalysisMemoryLimit type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L174-L179 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L174-L179 type AnalysisMemoryLimit struct { // ModelMemoryLimit Limits can be applied for the resources required to hold the mathematical // models in memory. These limits are approximate and can be set per job. They @@ -57,7 +58,7 @@ func (s *AnalysisMemoryLimit) UnmarshalJSON(data []byte) error { case "model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/analytics.go b/typedapi/types/analytics.go index 498fb740d2..6e4e5de853 100644 --- a/typedapi/types/analytics.go +++ b/typedapi/types/analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Analytics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L330-L332 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L330-L332 type Analytics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *Analytics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -73,7 +74,7 @@ func (s *Analytics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -82,7 +83,7 @@ func (s *Analytics) UnmarshalJSON(data []byte) error { case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } } diff --git a/typedapi/types/analyticscollection.go b/typedapi/types/analyticscollection.go index cf913abf7b..3794cd2610 100644 --- a/typedapi/types/analyticscollection.go +++ b/typedapi/types/analyticscollection.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // AnalyticsCollection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/_types/BehavioralAnalytics.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/_types/BehavioralAnalytics.ts#L22-L27 type AnalyticsCollection struct { // EventDataStream Data stream for the collection. EventDataStream EventDataStream `json:"event_data_stream"` diff --git a/typedapi/types/analyticsstatistics.go b/typedapi/types/analyticsstatistics.go index 28902f615e..f99ea3a89f 100644 --- a/typedapi/types/analyticsstatistics.go +++ b/typedapi/types/analyticsstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalyticsStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L61-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L61-L71 type AnalyticsStatistics struct { BoxplotUsage int64 `json:"boxplot_usage"` CumulativeCardinalityUsage int64 `json:"cumulative_cardinality_usage"` @@ -65,7 +66,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BoxplotUsage", err) } s.BoxplotUsage = value case float64: @@ -80,7 +81,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CumulativeCardinalityUsage", err) } s.CumulativeCardinalityUsage = value case float64: @@ -95,7 +96,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MovingPercentilesUsage", err) } s.MovingPercentilesUsage = value case float64: @@ -110,7 +111,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MultiTermsUsage", err) } s.MultiTermsUsage = &value case float64: @@ -125,7 +126,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NormalizeUsage", err) } s.NormalizeUsage = value case float64: @@ -140,7 +141,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RateUsage", err) } s.RateUsage = value case float64: @@ -155,7 +156,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StringStatsUsage", err) } s.StringStatsUsage = value case float64: @@ -170,7 +171,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TTestUsage", err) } s.TTestUsage = value case float64: @@ -185,7 +186,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TopMetricsUsage", err) } s.TopMetricsUsage = value case float64: diff --git a/typedapi/types/analyzedetail.go b/typedapi/types/analyzedetail.go index 60e158c3d2..4810acaa90 100644 --- a/typedapi/types/analyzedetail.go +++ b/typedapi/types/analyzedetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalyzeDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L24-L30 type AnalyzeDetail struct { Analyzer *AnalyzerDetail `json:"analyzer,omitempty"` Charfilters []CharFilterDetail `json:"charfilters,omitempty"` @@ -56,12 +57,12 @@ func (s *AnalyzeDetail) UnmarshalJSON(data []byte) error { case "analyzer": if err := dec.Decode(&s.Analyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } case "charfilters": if err := dec.Decode(&s.Charfilters); err != nil { - return err + return fmt.Errorf("%s | %w", "Charfilters", err) } case "custom_analyzer": @@ -71,7 +72,7 @@ func (s *AnalyzeDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CustomAnalyzer", err) } s.CustomAnalyzer = value case bool: @@ -80,12 +81,12 @@ func (s *AnalyzeDetail) UnmarshalJSON(data []byte) error { case "tokenfilters": if err := dec.Decode(&s.Tokenfilters); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenfilters", err) } case "tokenizer": if err := dec.Decode(&s.Tokenizer); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } } diff --git a/typedapi/types/analyzer.go b/typedapi/types/analyzer.go index bcaf015031..3675a34e81 100644 --- a/typedapi/types/analyzer.go +++ b/typedapi/types/analyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -37,5 +37,5 @@ package types // SnowballAnalyzer // DutchAnalyzer // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L113-L131 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L113-L131 type Analyzer interface{} diff --git a/typedapi/types/analyzerdetail.go b/typedapi/types/analyzerdetail.go index ba05c1d9a8..5d68a9cb6d 100644 --- a/typedapi/types/analyzerdetail.go +++ b/typedapi/types/analyzerdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalyzerDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L32-L35 type AnalyzerDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` @@ -54,7 +55,7 @@ func (s *AnalyzerDetail) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *AnalyzerDetail) UnmarshalJSON(data []byte) error { case "tokens": if err := dec.Decode(&s.Tokens); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokens", err) } } diff --git a/typedapi/types/analyzetoken.go b/typedapi/types/analyzetoken.go index 348f230204..cfa71ee5e0 100644 --- a/typedapi/types/analyzetoken.go +++ b/typedapi/types/analyzetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnalyzeToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L37-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L37-L44 type AnalyzeToken struct { EndOffset int64 `json:"end_offset"` Position int64 `json:"position"` @@ -62,7 +63,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EndOffset", err) } s.EndOffset = value case float64: @@ -77,7 +78,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Position", err) } s.Position = value case float64: @@ -92,7 +93,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositionLength", err) } s.PositionLength = &value case float64: @@ -107,7 +108,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StartOffset", err) } s.StartOffset = value case float64: @@ -118,7 +119,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { case "token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Token", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/anomaly.go b/typedapi/types/anomaly.go index f82120d3d1..4034e7e080 100644 --- 
a/typedapi/types/anomaly.go +++ b/typedapi/types/anomaly.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Anomaly type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Anomaly.ts#L24-L121 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Anomaly.ts#L24-L121 type Anomaly struct { // Actual The actual value for the bucket. Actual []Float64 `json:"actual,omitempty"` @@ -128,23 +129,23 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "actual": if err := dec.Decode(&s.Actual); err != nil { - return err + return fmt.Errorf("%s | %w", "Actual", err) } case "anomaly_score_explanation": if err := dec.Decode(&s.AnomalyScoreExplanation); err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyScoreExplanation", err) } case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "by_field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ByFieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -156,7 +157,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "by_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ByFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -167,7 +168,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "causes": if err := dec.Decode(&s.Causes); err != nil { - return err + return fmt.Errorf("%s | %w", "Causes", err) } case "detector_index": @@ -178,7 +179,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DetectorIndex", err) } s.DetectorIndex = value case float64: @@ -189,7 +190,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -201,7 +202,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "function": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Function", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -213,7 +214,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "function_description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FunctionDescription", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -224,12 +225,12 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "geo_results": if err := dec.Decode(&s.GeoResults); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoResults", err) } case "influencers": if err := dec.Decode(&s.Influencers); err != nil { - return err + return fmt.Errorf("%s | %w", "Influencers", err) } case "initial_record_score": @@ -239,7 
+240,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitialRecordScore", err) } f := Float64(value) s.InitialRecordScore = f @@ -255,7 +256,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsInterim", err) } s.IsInterim = value case bool: @@ -265,7 +266,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "job_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -277,7 +278,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "over_field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "OverFieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -289,7 +290,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "over_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "OverFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -301,7 +302,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "partition_field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -313,7 +314,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "partition_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -329,7 +330,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Probability", err) } f := Float64(value) s.Probability = f @@ -345,7 +346,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RecordScore", err) } f := Float64(value) s.RecordScore = f @@ -357,7 +358,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "result_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -368,12 +369,12 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "typical": if err := dec.Decode(&s.Typical); err != nil { - return err + return fmt.Errorf("%s | %w", "Typical", err) } } diff --git a/typedapi/types/anomalycause.go b/typedapi/types/anomalycause.go index 943a814544..c509f96143 100644 --- a/typedapi/types/anomalycause.go +++ b/typedapi/types/anomalycause.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnomalyCause type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Anomaly.ts#L123-L138 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Anomaly.ts#L123-L138 type AnomalyCause struct { Actual []Float64 `json:"actual"` ByFieldName string `json:"by_field_name"` @@ -65,18 +66,18 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "actual": if err := dec.Decode(&s.Actual); err != nil { - return err + return fmt.Errorf("%s | %w", "Actual", err) } case "by_field_name": if err := dec.Decode(&s.ByFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "ByFieldName", err) } case "by_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ByFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "correlated_by_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CorrelatedByFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,13 +100,13 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "field_name": if err := dec.Decode(&s.FieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldName", err) } case "function": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Function", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "function_description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FunctionDescription", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,18 +129,18 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "influencers": if err := dec.Decode(&s.Influencers); err != nil { - return err + return fmt.Errorf("%s | %w", "Influencers", err) } case "over_field_name": if err := dec.Decode(&s.OverFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "OverFieldName", err) } case "over_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "OverFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -151,7 +152,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "partition_field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,7 +164,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "partition_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -179,7 +180,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Probability", err) } f := Float64(value) s.Probability = f @@ -190,7 +191,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { case "typical": if err := dec.Decode(&s.Typical); err != nil { - return err + return fmt.Errorf("%s | %w", "Typical", err) } } diff --git a/typedapi/types/anomalydetectors.go b/typedapi/types/anomalydetectors.go index bc4e0543bf..452d2069dd 100644 --- a/typedapi/types/anomalydetectors.go +++ b/typedapi/types/anomalydetectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnomalyDetectors type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/info/types.ts#L44-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/info/types.ts#L44-L50 type AnomalyDetectors struct { CategorizationAnalyzer CategorizationAnalyzer `json:"categorization_analyzer"` CategorizationExamplesLimit int `json:"categorization_examples_limit"` @@ -70,7 +71,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.CategorizationAnalyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationAnalyzer", err) } } @@ -82,7 +83,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationExamplesLimit", err) } s.CategorizationExamplesLimit = value case float64: @@ -98,7 +99,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DailyModelSnapshotRetentionAfterDays", err) } s.DailyModelSnapshotRetentionAfterDays = value case float64: @@ -109,7 +110,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case "model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotRetentionDays", err) } s.ModelSnapshotRetentionDays = value case float64: diff --git a/typedapi/types/anomalyexplanation.go b/typedapi/types/anomalyexplanation.go index a8c26603fc..5bdc779c7f 100644 --- a/typedapi/types/anomalyexplanation.go +++ b/typedapi/types/anomalyexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AnomalyExplanation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Anomaly.ts#L156-L197 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Anomaly.ts#L156-L197 type AnomalyExplanation struct { // AnomalyCharacteristicsImpact Impact from the duration and magnitude of the detected anomaly relative to // the historical average. @@ -81,7 +82,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyCharacteristicsImpact", err) } s.AnomalyCharacteristicsImpact = &value case float64: @@ -97,7 +98,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyLength", err) } s.AnomalyLength = &value case float64: @@ -108,7 +109,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case "anomaly_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HighVariancePenalty", err) } s.HighVariancePenalty = &value case bool: @@ -138,7 +139,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncompleteBucketPenalty", err) } s.IncompleteBucketPenalty = &value case bool: @@ -152,7 +153,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LowerConfidenceBound", err) } f := Float64(value) s.LowerConfidenceBound = &f @@ -169,7 +170,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MultiBucketImpact", err) } s.MultiBucketImpact = &value case float64: @@ -185,7 +186,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SingleBucketImpact", err) } s.SingleBucketImpact = &value case float64: @@ -200,7 +201,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TypicalValue", err) } f := Float64(value) s.TypicalValue = &f @@ -216,7 +217,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UpperConfidenceBound", err) } f := Float64(value) s.UpperConfidenceBound = &f diff --git a/typedapi/types/apikey.go b/typedapi/types/apikey.go index 771a26bc1c..9c2c2d7c63 100644 --- a/typedapi/types/apikey.go +++ b/typedapi/types/apikey.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ApiKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/ApiKey.ts#L27-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/ApiKey.ts#L27-L77 type ApiKey struct { // Creation Creation time for the API key in milliseconds. Creation *int64 `json:"creation,omitempty"` @@ -85,7 +86,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Creation", err) } s.Creation = &value case float64: @@ -100,7 +101,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Expiration", err) } s.Expiration = &value case float64: @@ -110,7 +111,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "invalidated": @@ -120,7 +121,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Invalidated", err) } s.Invalidated = &value case bool: @@ -129,23 +130,23 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { case "limited_by": if err := dec.Decode(&s.LimitedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "LimitedBy", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "realm": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Realm", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -159,17 +160,17 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { s.RoleDescriptors = make(map[string]RoleDescriptor, 0) } if err := dec.Decode(&s.RoleDescriptors); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleDescriptors", err) } case "_sort": if err := dec.Decode(&s.Sort_); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort_", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/apikeyaggregate.go b/typedapi/types/apikeyaggregate.go new file mode 100644 index 0000000000..c7ddab3024 --- /dev/null +++ b/typedapi/types/apikeyaggregate.go @@ -0,0 +1,40 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +// APIKeyAggregate holds the union for the following types: +// +// CardinalityAggregate +// ValueCountAggregate +// StringTermsAggregate +// LongTermsAggregate +// DoubleTermsAggregate +// UnmappedTermsAggregate +// MultiTermsAggregate +// MissingAggregate +// FilterAggregate +// FiltersAggregate +// RangeAggregate +// DateRangeAggregate +// CompositeAggregate +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/query_api_keys/types.ts#L123-L140 +type APIKeyAggregate interface{} diff --git a/typedapi/types/apikeyaggregationcontainer.go b/typedapi/types/apikeyaggregationcontainer.go new file mode 100644 index 0000000000..94dce64799 --- /dev/null +++ b/typedapi/types/apikeyaggregationcontainer.go @@ -0,0 +1,154 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// APIKeyAggregationContainer type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/query_api_keys/types.ts#L64-L121 +type APIKeyAggregationContainer struct { + // Aggregations Sub-aggregations for this aggregation. + // Only applies to bucket aggregations. + Aggregations map[string]APIKeyAggregationContainer `json:"aggregations,omitempty"` + // Cardinality A single-value metrics aggregation that calculates an approximate count of + // distinct values. + Cardinality *CardinalityAggregation `json:"cardinality,omitempty"` + // Composite A multi-bucket aggregation that creates composite buckets from different + // sources. + // Unlike the other multi-bucket aggregations, you can use the `composite` + // aggregation to paginate *all* buckets from a multi-level aggregation + // efficiently. 
+ Composite *CompositeAggregation `json:"composite,omitempty"` + // DateRange A multi-bucket value source based aggregation that enables the user to define + // a set of date ranges - each representing a bucket. + DateRange *DateRangeAggregation `json:"date_range,omitempty"` + // Filter A single bucket aggregation that narrows the set of documents to those that + // match a query. + Filter *APIKeyQueryContainer `json:"filter,omitempty"` + // Filters A multi-bucket aggregation where each bucket contains the documents that + // match a query. + Filters *APIKeyFiltersAggregation `json:"filters,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing *MissingAggregation `json:"missing,omitempty"` + // Range A multi-bucket value source based aggregation that enables the user to define + // a set of ranges - each representing a bucket. + Range *RangeAggregation `json:"range,omitempty"` + // Terms A multi-bucket value source based aggregation where buckets are dynamically + // built - one per unique value. + Terms *TermsAggregation `json:"terms,omitempty"` + // ValueCount A single-value metrics aggregation that counts the number of values that are + // extracted from the aggregated documents. + ValueCount *ValueCountAggregation `json:"value_count,omitempty"` +} + +func (s *APIKeyAggregationContainer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations", "aggs": + if s.Aggregations == nil { + s.Aggregations = make(map[string]APIKeyAggregationContainer, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return fmt.Errorf("%s | %w", "Aggregations", err) + } + + case "cardinality": + if err := dec.Decode(&s.Cardinality); err != nil { + return fmt.Errorf("%s | %w", "Cardinality", err) + } + + case "composite": + if err := dec.Decode(&s.Composite); err != nil { + return fmt.Errorf("%s | %w", "Composite", err) + } + + case "date_range": + if err := dec.Decode(&s.DateRange); err != nil { + return fmt.Errorf("%s | %w", "DateRange", err) + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + + case "filters": + if err := dec.Decode(&s.Filters); err != nil { + return fmt.Errorf("%s | %w", "Filters", err) + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return fmt.Errorf("%s | %w", "Meta", err) + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return fmt.Errorf("%s | %w", "Missing", err) + } + + case "range": + if err := dec.Decode(&s.Range); err != nil { + return fmt.Errorf("%s | %w", "Range", err) + } + + case "terms": + if err := dec.Decode(&s.Terms); err != nil { + return fmt.Errorf("%s | %w", "Terms", err) + } + + case "value_count": + if err := dec.Decode(&s.ValueCount); err != nil { + return fmt.Errorf("%s | %w", "ValueCount", err) + } + + } + } + return nil +} + +// NewAPIKeyAggregationContainer returns a APIKeyAggregationContainer. +func NewAPIKeyAggregationContainer() *APIKeyAggregationContainer { + r := &APIKeyAggregationContainer{ + Aggregations: make(map[string]APIKeyAggregationContainer, 0), + } + + return r +} diff --git a/typedapi/types/apikeyauthorization.go b/typedapi/types/apikeyauthorization.go index db0981949a..4a134e41b9 100644 --- a/typedapi/types/apikeyauthorization.go +++ b/typedapi/types/apikeyauthorization.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ApiKeyAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Authorization.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Authorization.ts#L20-L29 type ApiKeyAuthorization struct { // Id The identifier for the API key. Id string `json:"id"` @@ -56,7 +57,7 @@ func (s *ApiKeyAuthorization) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *ApiKeyAuthorization) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/apikeyfiltersaggregation.go b/typedapi/types/apikeyfiltersaggregation.go new file mode 100644 index 0000000000..06b9d71d25 --- /dev/null +++ b/typedapi/types/apikeyfiltersaggregation.go @@ -0,0 +1,153 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// APIKeyFiltersAggregation type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/query_api_keys/types.ts#L208-L228 +type APIKeyFiltersAggregation struct { + // Filters Collection of queries from which to build buckets. + Filters BucketsAPIKeyQueryContainer `json:"filters,omitempty"` + // Keyed By default, the named filters aggregation returns the buckets as an object. + // Set to `false` to return the buckets as an array of objects. + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + // OtherBucket Set to `true` to add a bucket to the response which will contain all + // documents that do not match any of the given filters. 
+ OtherBucket *bool `json:"other_bucket,omitempty"` + // OtherBucketKey The key with which the other bucket is returned. + OtherBucketKey *string `json:"other_bucket_key,omitempty"` +} + +func (s *APIKeyFiltersAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filters": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]APIKeyQueryContainer, 0) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Filters", err) + } + s.Filters = o + case '[': + o := []APIKeyQueryContainer{} + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Filters", err) + } + s.Filters = o + } + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Keyed", err) + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return fmt.Errorf("%s | %w", "Meta", err) + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Name", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Name = &o + + case "other_bucket": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "OtherBucket", err) + } + s.OtherBucket = &value + case bool: + s.OtherBucket = &v + } + + case "other_bucket_key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "OtherBucketKey", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.OtherBucketKey = &o + + } + } + return nil +} + +// NewAPIKeyFiltersAggregation returns a APIKeyFiltersAggregation. +func NewAPIKeyFiltersAggregation() *APIKeyFiltersAggregation { + r := &APIKeyFiltersAggregation{} + + return r +} diff --git a/typedapi/types/apikeyquerycontainer.go b/typedapi/types/apikeyquerycontainer.go new file mode 100644 index 0000000000..a944409f20 --- /dev/null +++ b/typedapi/types/apikeyquerycontainer.go @@ -0,0 +1,69 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +// APIKeyQueryContainer type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/query_api_keys/types.ts#L142-L206 +type APIKeyQueryContainer struct { + // Bool matches documents matching boolean combinations of other queries. + Bool *BoolQuery `json:"bool,omitempty"` + // Exists Returns documents that contain an indexed value for a field. + Exists *ExistsQuery `json:"exists,omitempty"` + // Ids Returns documents based on their IDs. + // This query uses document IDs stored in the `_id` field. + Ids *IdsQuery `json:"ids,omitempty"` + // Match Returns documents that match a provided text, number, date or boolean value. + // The provided text is analyzed before matching. + Match map[string]MatchQuery `json:"match,omitempty"` + // MatchAll Matches all documents, giving them all a `_score` of 1.0. + MatchAll *MatchAllQuery `json:"match_all,omitempty"` + // Prefix Returns documents that contain a specific prefix in a provided field. + Prefix map[string]PrefixQuery `json:"prefix,omitempty"` + // Range Returns documents that contain terms within a provided range. + Range map[string]RangeQuery `json:"range,omitempty"` + // SimpleQueryString Returns documents based on a provided query string, using a parser with a + // limited but fault-tolerant syntax. + SimpleQueryString *SimpleQueryStringQuery `json:"simple_query_string,omitempty"` + // Term Returns documents that contain an exact term in a provided field. + // To return a document, the query term must exactly match the queried field's + // value, including whitespace and capitalization. + Term map[string]TermQuery `json:"term,omitempty"` + // Terms Returns documents that contain one or more exact terms in a provided field. + // To return a document, one or more terms must exactly match a field value, + // including whitespace and capitalization. + Terms *TermsQuery `json:"terms,omitempty"` + // Wildcard Returns documents that contain terms matching a wildcard pattern. + Wildcard map[string]WildcardQuery `json:"wildcard,omitempty"` +} + +// NewAPIKeyQueryContainer returns a APIKeyQueryContainer. +func NewAPIKeyQueryContainer() *APIKeyQueryContainer { + r := &APIKeyQueryContainer{ + Match: make(map[string]MatchQuery, 0), + Prefix: make(map[string]PrefixQuery, 0), + Range: make(map[string]RangeQuery, 0), + Term: make(map[string]TermQuery, 0), + Wildcard: make(map[string]WildcardQuery, 0), + } + + return r +} diff --git a/typedapi/types/appendprocessor.go b/typedapi/types/appendprocessor.go index 6edcf8e1b1..14b674bd17 100644 --- a/typedapi/types/appendprocessor.go +++ b/typedapi/types/appendprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AppendProcessor type. 
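The APIKeyAggregationContainer, APIKeyFiltersAggregation and APIKeyQueryContainer types introduced above back aggregations in the security query-API-keys request, restricting which queries and bucket aggregations may appear there. A rough usage sketch under two assumptions: the existing generated TermQuery exposes its value through a Value field, and BucketsAPIKeyQueryContainer (the type of Filters) accepts the keyed map form that its UnmarshalJSON above handles:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Bucket API keys into "invalidated" and "valid" groups with the new
	// filters aggregation; the map assigned to Filters mirrors the keyed
	// ('{') branch of APIKeyFiltersAggregation.UnmarshalJSON.
	agg := types.NewAPIKeyAggregationContainer()
	agg.Filters = types.NewAPIKeyFiltersAggregation()
	agg.Filters.Filters = map[string]types.APIKeyQueryContainer{
		"invalidated": {Term: map[string]types.TermQuery{"invalidated": {Value: true}}},  // assumed TermQuery.Value field
		"valid":       {Term: map[string]types.TermQuery{"invalidated": {Value: false}}}, // assumed TermQuery.Value field
	}

	// Render the aggregation body as it would be sent under "aggs" in a
	// query_api_keys request.
	body, err := json.MarshalIndent(agg, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}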
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L279-L294 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L279-L294 type AppendProcessor struct { // AllowDuplicates If `false`, the processor does not append values already present in the // field. @@ -76,7 +77,7 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowDuplicates", err) } s.AllowDuplicates = &value case bool: @@ -86,7 +87,7 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,13 +98,13 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -119,7 +120,7 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -128,13 +129,13 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/applicationglobaluserprivileges.go b/typedapi/types/applicationglobaluserprivileges.go index 726b13e560..1b07cc72e5 100644 --- a/typedapi/types/applicationglobaluserprivileges.go +++ b/typedapi/types/applicationglobaluserprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ApplicationGlobalUserPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L193-L195 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L193-L195 type ApplicationGlobalUserPrivileges struct { Manage ManageUserPrivileges `json:"manage"` } diff --git a/typedapi/types/applicationprivileges.go b/typedapi/types/applicationprivileges.go index a44418b1af..b15d98583e 100644 --- a/typedapi/types/applicationprivileges.go +++ b/typedapi/types/applicationprivileges.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ApplicationPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L26-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L26-L39 type ApplicationPrivileges struct { // Application The name of the application to which this entry applies. Application string `json:"application"` @@ -59,7 +60,7 @@ func (s *ApplicationPrivileges) UnmarshalJSON(data []byte) error { case "application": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Application", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,12 +71,12 @@ func (s *ApplicationPrivileges) UnmarshalJSON(data []byte) error { case "privileges": if err := dec.Decode(&s.Privileges); err != nil { - return err + return fmt.Errorf("%s | %w", "Privileges", err) } case "resources": if err := dec.Decode(&s.Resources); err != nil { - return err + return fmt.Errorf("%s | %w", "Resources", err) } } diff --git a/typedapi/types/applicationprivilegescheck.go b/typedapi/types/applicationprivilegescheck.go index bfb5df5abf..420f140877 100644 --- a/typedapi/types/applicationprivilegescheck.go +++ b/typedapi/types/applicationprivilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ApplicationPrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/types.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/types.ts#L24-L31 type ApplicationPrivilegesCheck struct { // Application The name of the application. 
Application string `json:"application"` @@ -60,7 +61,7 @@ func (s *ApplicationPrivilegesCheck) UnmarshalJSON(data []byte) error { case "application": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Application", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,12 +72,12 @@ func (s *ApplicationPrivilegesCheck) UnmarshalJSON(data []byte) error { case "privileges": if err := dec.Decode(&s.Privileges); err != nil { - return err + return fmt.Errorf("%s | %w", "Privileges", err) } case "resources": if err := dec.Decode(&s.Resources); err != nil { - return err + return fmt.Errorf("%s | %w", "Resources", err) } } diff --git a/typedapi/types/applicationsprivileges.go b/typedapi/types/applicationsprivileges.go index 0f546ffbef..79be62fe5c 100644 --- a/typedapi/types/applicationsprivileges.go +++ b/typedapi/types/applicationsprivileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ApplicationsPrivileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/types.ts#L46-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/types.ts#L46-L46 type ApplicationsPrivileges map[string]ResourcePrivileges diff --git a/typedapi/types/archive.go b/typedapi/types/archive.go index c1e5c87c6d..d2c5754148 100644 --- a/typedapi/types/archive.go +++ b/typedapi/types/archive.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Archive type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L48-L50 type Archive struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *Archive) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -73,7 +74,7 @@ func (s *Archive) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -87,7 +88,7 @@ func (s *Archive) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesCount", err) } s.IndicesCount = value case float64: diff --git a/typedapi/types/arraycomparecondition.go b/typedapi/types/arraycomparecondition.go index fb7b0b7d7f..1a215489a2 100644 --- a/typedapi/types/arraycomparecondition.go +++ b/typedapi/types/arraycomparecondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -33,7 +33,7 @@ import ( // ArrayCompareCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L32-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L32-L36 type ArrayCompareCondition struct { ArrayCompareCondition map[conditionop.ConditionOp]ArrayCompareOpParams `json:"ArrayCompareCondition,omitempty"` Path string `json:"path"` @@ -59,13 +59,13 @@ func (s *ArrayCompareCondition) UnmarshalJSON(data []byte) error { s.ArrayCompareCondition = make(map[conditionop.ConditionOp]ArrayCompareOpParams, 0) } if err := dec.Decode(&s.ArrayCompareCondition); err != nil { - return err + return fmt.Errorf("%s | %w", "ArrayCompareCondition", err) } case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/arraycompareopparams.go b/typedapi/types/arraycompareopparams.go index 0c1c57bfe2..ae4c6a4f36 100644 --- a/typedapi/types/arraycompareopparams.go +++ b/typedapi/types/arraycompareopparams.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/quantifier" @@ -31,7 +32,7 @@ import ( // ArrayCompareOpParams type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L27-L30 type ArrayCompareOpParams struct { Quantifier quantifier.Quantifier `json:"quantifier"` Value FieldValue `json:"value"` @@ -54,12 +55,12 @@ func (s *ArrayCompareOpParams) UnmarshalJSON(data []byte) error { case "quantifier": if err := dec.Decode(&s.Quantifier); err != nil { - return err + return fmt.Errorf("%s | %w", "Quantifier", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/arraypercentilesitem.go b/typedapi/types/arraypercentilesitem.go index 66e1628482..7c4ca121eb 100644 --- a/typedapi/types/arraypercentilesitem.go +++ b/typedapi/types/arraypercentilesitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ArrayPercentilesItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L160-L164 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L160-L164 type ArrayPercentilesItem struct { Key string `json:"key"` Value Float64 `json:"value,omitempty"` @@ -55,7 +56,7 @@ func (s *ArrayPercentilesItem) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,13 +67,13 @@ func (s *ArrayPercentilesItem) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/asciifoldingtokenfilter.go b/typedapi/types/asciifoldingtokenfilter.go index 22d4f04a2d..a4d5f95c2a 100644 --- a/typedapi/types/asciifoldingtokenfilter.go +++ b/typedapi/types/asciifoldingtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // AsciiFoldingTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L168-L171 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L168-L171 type AsciiFoldingTokenFilter struct { PreserveOriginal Stringifiedboolean `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` @@ -53,17 +54,17 @@ func (s *AsciiFoldingTokenFilter) UnmarshalJSON(data []byte) error { case "preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/asyncsearch.go b/typedapi/types/asyncsearch.go index 8ab32005a9..3f1ebe8c9d 100644 --- a/typedapi/types/asyncsearch.go +++ b/typedapi/types/asyncsearch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -31,7 +32,7 @@ import ( // AsyncSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/async_search/_types/AsyncSearch.ts#L30-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/async_search/_types/AsyncSearch.ts#L30-L56 type AsyncSearch struct { // Aggregations Partial aggregations results, coming from the shards that have already // completed the execution of the query. 
@@ -98,490 +99,490 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != 
nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s 
| %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -591,7 +592,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -600,7 +601,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -608,12 +609,12 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -623,7 +624,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := Float64(value) s.MaxScore = &f 
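// Illustration (hypothetical caller-side sketch): the switch above follows Elasticsearch's
// typed_keys convention, where response keys look like "sterms#my_terms"; the prefix selects the
// concrete aggregate type and the suffix becomes the key in the Aggregations map, so each entry
// holds a pointer to a concrete aggregate struct. The aggregation name and import path here are
// assumptions.
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Normally Aggregations is populated by AsyncSearch.UnmarshalJSON; it is filled
	// by hand here only to keep the sketch self-contained.
	res := types.AsyncSearch{Aggregations: map[string]types.Aggregate{
		"my_terms": types.NewStringTermsAggregate(),
	}}
	if agg, ok := res.Aggregations["my_terms"].(*types.StringTermsAggregate); ok {
		fmt.Printf("my_terms decoded as %T\n", agg)
	}
}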
@@ -639,7 +640,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -649,22 +650,22 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -692,28 +693,28 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case "completion": o := NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -723,7 +724,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -737,7 +738,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -751,7 +752,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -765,7 +766,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/types/attachmentprocessor.go b/typedapi/types/attachmentprocessor.go index d76b0b049a..899f681015 100644 --- a/typedapi/types/attachmentprocessor.go +++ b/typedapi/types/attachmentprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AttachmentProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L296-L337 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L296-L337 type AttachmentProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -87,7 +88,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,13 +99,13 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,7 +121,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -134,7 +135,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -148,7 +149,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedChars", err) } s.IndexedChars = &value case float64: @@ -158,17 +159,17 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case "indexed_chars_field": if err := dec.Decode(&s.IndexedCharsField); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedCharsField", err) } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "properties": if err := dec.Decode(&s.Properties); err != nil { - return err + return fmt.Errorf("%s | %w", "Properties", err) } case "remove_binary": @@ -178,7 +179,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RemoveBinary", err) } s.RemoveBinary = &value case bool: @@ -188,7 +189,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case "resource_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResourceName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -200,7 +201,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,7 +212,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/audit.go b/typedapi/types/audit.go index 
e4dda627a3..7395e16a9d 100644 --- a/typedapi/types/audit.go +++ b/typedapi/types/audit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Audit type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L73-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L73-L75 type Audit struct { Enabled bool `json:"enabled"` Outputs []string `json:"outputs,omitempty"` @@ -58,7 +59,7 @@ func (s *Audit) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -67,7 +68,7 @@ func (s *Audit) UnmarshalJSON(data []byte) error { case "outputs": if err := dec.Decode(&s.Outputs); err != nil { - return err + return fmt.Errorf("%s | %w", "Outputs", err) } } diff --git a/typedapi/types/authenticateduser.go b/typedapi/types/authenticateduser.go index 39a14d196e..57dc5f66cb 100644 --- a/typedapi/types/authenticateduser.go +++ b/typedapi/types/authenticateduser.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AuthenticatedUser type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_token/types.ts#L40-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_token/types.ts#L40-L45 type AuthenticatedUser struct { AuthenticationProvider *AuthenticationProvider `json:"authentication_provider,omitempty"` AuthenticationRealm UserRealm `json:"authentication_realm"` @@ -62,18 +63,18 @@ func (s *AuthenticatedUser) UnmarshalJSON(data []byte) error { case "authentication_provider": if err := dec.Decode(&s.AuthenticationProvider); err != nil { - return err + return fmt.Errorf("%s | %w", "AuthenticationProvider", err) } case "authentication_realm": if err := dec.Decode(&s.AuthenticationRealm); err != nil { - return err + return fmt.Errorf("%s | %w", "AuthenticationRealm", err) } case "authentication_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AuthenticationType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,7 +86,7 @@ func (s *AuthenticatedUser) UnmarshalJSON(data []byte) error { case "email": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Email", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -101,7 +102,7 @@ func (s *AuthenticatedUser) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -110,32 +111,32 @@ func (s *AuthenticatedUser) UnmarshalJSON(data []byte) error { case "full_name": if err := dec.Decode(&s.FullName); err != nil { - return err + return fmt.Errorf("%s | %w", "FullName", err) } case "lookup_realm": if err := dec.Decode(&s.LookupRealm); err != nil { - return err + return fmt.Errorf("%s | %w", "LookupRealm", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "profile_uid": if err := dec.Decode(&s.ProfileUid); err != nil { - return err + return fmt.Errorf("%s | %w", "ProfileUid", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/authenticatetoken.go b/typedapi/types/authenticatetoken.go index a1ddeae72f..d4d7aaca86 100644 --- a/typedapi/types/authenticatetoken.go +++ b/typedapi/types/authenticatetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AuthenticateToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/authenticate/types.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/authenticate/types.ts#L22-L29 type AuthenticateToken struct { Name string `json:"name"` Type *string `json:"type,omitempty"` @@ -53,13 +54,13 @@ func (s *AuthenticateToken) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/authenticationprovider.go b/typedapi/types/authenticationprovider.go index 9b9b2267fb..62d0180001 100644 --- a/typedapi/types/authenticationprovider.go +++ b/typedapi/types/authenticationprovider.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AuthenticationProvider type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_token/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_token/types.ts#L35-L38 type AuthenticationProvider struct { Name string `json:"name"` Type string `json:"type"` @@ -53,13 +54,13 @@ func (s *AuthenticationProvider) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/autodatehistogramaggregate.go b/typedapi/types/autodatehistogramaggregate.go index 53384f8e3c..c04dded0c8 100644 --- a/typedapi/types/autodatehistogramaggregate.go +++ b/typedapi/types/autodatehistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // AutoDateHistogramAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L356-L360 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L356-L360 type AutoDateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Interval string `json:"interval"` @@ -61,25 +62,25 @@ func (s *AutoDateHistogramAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]DateHistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []DateHistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "interval": if err := dec.Decode(&s.Interval); err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/autodatehistogramaggregation.go b/typedapi/types/autodatehistogramaggregation.go index d7e3c90f50..58071497aa 100644 --- a/typedapi/types/autodatehistogramaggregation.go +++ b/typedapi/types/autodatehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // AutoDateHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L65-L100 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L65-L100 type AutoDateHistogramAggregation struct { // Buckets The target number of buckets. 
Buckets *int `json:"buckets,omitempty"` @@ -82,7 +83,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = &value case float64: @@ -92,13 +93,13 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,23 +110,23 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "minimum_interval": if err := dec.Decode(&s.MinimumInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumInterval", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -137,7 +138,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "offset": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -151,13 +152,13 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -166,7 +167,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -175,7 +176,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -183,7 +184,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -192,7 +193,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/types/autofollowedcluster.go b/typedapi/types/autofollowedcluster.go index cb393d056e..e58e48d299 100644 --- a/typedapi/types/autofollowedcluster.go +++ b/typedapi/types/autofollowedcluster.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // AutoFollowedCluster type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/stats/types.ts.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/stats/types.ts.ts#L27-L31 type AutoFollowedCluster struct { ClusterName string `json:"cluster_name"` LastSeenMetadataVersion int64 `json:"last_seen_metadata_version"` @@ -53,17 +54,17 @@ func (s *AutoFollowedCluster) UnmarshalJSON(data []byte) error { case "cluster_name": if err := dec.Decode(&s.ClusterName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClusterName", err) } case "last_seen_metadata_version": if err := dec.Decode(&s.LastSeenMetadataVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "LastSeenMetadataVersion", err) } case "time_since_last_check_millis": if err := dec.Decode(&s.TimeSinceLastCheckMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSinceLastCheckMillis", err) } } diff --git a/typedapi/types/autofollowpattern.go b/typedapi/types/autofollowpattern.go index 2bb7efc4a2..bcffe1688b 100644 --- a/typedapi/types/autofollowpattern.go +++ b/typedapi/types/autofollowpattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // AutoFollowPattern type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/get_auto_follow_pattern/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/get_auto_follow_pattern/types.ts#L23-L26 type AutoFollowPattern struct { Name string `json:"name"` Pattern AutoFollowPatternSummary `json:"pattern"` @@ -52,12 +53,12 @@ func (s *AutoFollowPattern) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "pattern": if err := dec.Decode(&s.Pattern); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } } diff --git a/typedapi/types/autofollowpatternsummary.go b/typedapi/types/autofollowpatternsummary.go index f4873abc35..e6d3ee401d 100644 --- a/typedapi/types/autofollowpatternsummary.go +++ b/typedapi/types/autofollowpatternsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AutoFollowPatternSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/get_auto_follow_pattern/types.ts#L28-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/get_auto_follow_pattern/types.ts#L28-L52 type AutoFollowPatternSummary struct { Active bool `json:"active"` // FollowIndexPattern The name of follower index. @@ -69,7 +70,7 @@ func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Active", err) } s.Active = value case bool: @@ -78,17 +79,17 @@ func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { case "follow_index_pattern": if err := dec.Decode(&s.FollowIndexPattern); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowIndexPattern", err) } case "leader_index_exclusion_patterns": if err := dec.Decode(&s.LeaderIndexExclusionPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndexExclusionPatterns", err) } case "leader_index_patterns": if err := dec.Decode(&s.LeaderIndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndexPatterns", err) } case "max_outstanding_read_requests": @@ -99,7 +100,7 @@ func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingReadRequests", err) } s.MaxOutstandingReadRequests = value case float64: @@ -110,7 +111,7 @@ func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { case "remote_cluster": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteCluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/autofollowstats.go b/typedapi/types/autofollowstats.go index 84a4bafdaf..5e6ee8ca62 100644 --- a/typedapi/types/autofollowstats.go +++ b/typedapi/types/autofollowstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AutoFollowStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/stats/types.ts.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/stats/types.ts.ts#L33-L39 type AutoFollowStats struct { AutoFollowedClusters []AutoFollowedCluster `json:"auto_followed_clusters"` NumberOfFailedFollowIndices int64 `json:"number_of_failed_follow_indices"` @@ -56,7 +57,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { case "auto_followed_clusters": if err := dec.Decode(&s.AutoFollowedClusters); err != nil { - return err + return fmt.Errorf("%s | %w", "AutoFollowedClusters", err) } case "number_of_failed_follow_indices": @@ -66,7 +67,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfFailedFollowIndices", err) } s.NumberOfFailedFollowIndices = value case float64: @@ -81,7 +82,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfFailedRemoteClusterStateRequests", err) } s.NumberOfFailedRemoteClusterStateRequests = value case float64: @@ -96,7 +97,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfSuccessfulFollowIndices", err) } s.NumberOfSuccessfulFollowIndices = value case float64: @@ -106,7 +107,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { case "recent_auto_follow_errors": if err := dec.Decode(&s.RecentAutoFollowErrors); err != nil { - return err + return fmt.Errorf("%s | %w", "RecentAutoFollowErrors", err) } } diff --git a/typedapi/types/autoscalingcapacity.go b/typedapi/types/autoscalingcapacity.go index b71eee90b7..dd04a1081c 100644 --- a/typedapi/types/autoscalingcapacity.go +++ b/typedapi/types/autoscalingcapacity.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // AutoscalingCapacity type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L38-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L38-L41 type AutoscalingCapacity struct { Node AutoscalingResources `json:"node"` Total AutoscalingResources `json:"total"` diff --git a/typedapi/types/autoscalingdecider.go b/typedapi/types/autoscalingdecider.go index e9efdea722..5d889fd295 100644 --- a/typedapi/types/autoscalingdecider.go +++ b/typedapi/types/autoscalingdecider.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AutoscalingDecider type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L52-L56 type AutoscalingDecider struct { ReasonDetails json.RawMessage `json:"reason_details,omitempty"` ReasonSummary *string `json:"reason_summary,omitempty"` @@ -54,13 +55,13 @@ func (s *AutoscalingDecider) UnmarshalJSON(data []byte) error { case "reason_details": if err := dec.Decode(&s.ReasonDetails); err != nil { - return err + return fmt.Errorf("%s | %w", "ReasonDetails", err) } case "reason_summary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ReasonSummary", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *AutoscalingDecider) UnmarshalJSON(data []byte) error { case "required_capacity": if err := dec.Decode(&s.RequiredCapacity); err != nil { - return err + return fmt.Errorf("%s | %w", "RequiredCapacity", err) } } diff --git a/typedapi/types/autoscalingdeciders.go b/typedapi/types/autoscalingdeciders.go index e63e222d77..86d0c965e7 100644 --- a/typedapi/types/autoscalingdeciders.go +++ b/typedapi/types/autoscalingdeciders.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // AutoscalingDeciders type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L31-L36 type AutoscalingDeciders struct { CurrentCapacity AutoscalingCapacity `json:"current_capacity"` CurrentNodes []AutoscalingNode `json:"current_nodes"` diff --git a/typedapi/types/autoscalingnode.go b/typedapi/types/autoscalingnode.go index 35c284814e..6460e03ecb 100644 --- a/typedapi/types/autoscalingnode.go +++ b/typedapi/types/autoscalingnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // AutoscalingNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L48-L50 type AutoscalingNode struct { Name string `json:"name"` } @@ -51,7 +52,7 @@ func (s *AutoscalingNode) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/autoscalingpolicy.go b/typedapi/types/autoscalingpolicy.go index ab4881e890..a00faa3ee3 100644 --- a/typedapi/types/autoscalingpolicy.go +++ b/typedapi/types/autoscalingpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // AutoscalingPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/_types/AutoscalingPolicy.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/_types/AutoscalingPolicy.ts#L23-L27 type AutoscalingPolicy struct { // Deciders Decider settings Deciders map[string]json.RawMessage `json:"deciders"` diff --git a/typedapi/types/autoscalingresources.go b/typedapi/types/autoscalingresources.go index 3c2e8b6d3e..11d15366da 100644 --- a/typedapi/types/autoscalingresources.go +++ b/typedapi/types/autoscalingresources.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AutoscalingResources type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L43-L46 type AutoscalingResources struct { Memory int `json:"memory"` Storage int `json:"storage"` @@ -59,7 +60,7 @@ func (s *AutoscalingResources) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Memory", err) } s.Memory = value case float64: @@ -75,7 +76,7 @@ func (s *AutoscalingResources) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Storage", err) } s.Storage = value case float64: diff --git a/typedapi/types/averageaggregation.go b/typedapi/types/averageaggregation.go index ca6b67a0e9..5e9bbf1797 100644 --- a/typedapi/types/averageaggregation.go +++ b/typedapi/types/averageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L55-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L55-L55 type AverageAggregation struct { // Field The field on which to run the aggregation. 
Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *AverageAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *AverageAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *AverageAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *AverageAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *AverageAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/averagebucketaggregation.go b/typedapi/types/averagebucketaggregation.go index f028361a4b..c538c82198 100644 --- a/typedapi/types/averagebucketaggregation.go +++ b/typedapi/types/averagebucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // AverageBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L78-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L78-L78 type AverageBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/avgaggregate.go b/typedapi/types/avgaggregate.go index 5343c4c8db..e7398b9975 100644 --- a/typedapi/types/avgaggregate.go +++ b/typedapi/types/avgaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // AvgAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L209-L210 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L209-L210 type AvgAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *AvgAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/azurerepository.go b/typedapi/types/azurerepository.go new file mode 100644 index 0000000000..30fa33900a --- /dev/null +++ b/typedapi/types/azurerepository.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// AzureRepository type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L40-L43 +type AzureRepository struct { + Settings AzureRepositorySettings `json:"settings"` + Type string `json:"type,omitempty"` + Uuid *string `json:"uuid,omitempty"` +} + +func (s *AzureRepository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return fmt.Errorf("%s | %w", "Settings", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return fmt.Errorf("%s | %w", "Uuid", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s AzureRepository) MarshalJSON() ([]byte, error) { + type innerAzureRepository AzureRepository + tmp := innerAzureRepository{ + Settings: s.Settings, + Type: s.Type, + Uuid: s.Uuid, + } + + tmp.Type = "azure" + + return json.Marshal(tmp) +} + +// NewAzureRepository returns a AzureRepository. +func NewAzureRepository() *AzureRepository { + r := &AzureRepository{} + + return r +} diff --git a/typedapi/types/azurerepositorysettings.go b/typedapi/types/azurerepositorysettings.go new file mode 100644 index 0000000000..7c3a52cba4 --- /dev/null +++ b/typedapi/types/azurerepositorysettings.go @@ -0,0 +1,163 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// AzureRepositorySettings type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L77-L83 +type AzureRepositorySettings struct { + BasePath *string `json:"base_path,omitempty"` + ChunkSize ByteSize `json:"chunk_size,omitempty"` + Client *string `json:"client,omitempty"` + Compress *bool `json:"compress,omitempty"` + Container *string `json:"container,omitempty"` + LocationMode *string `json:"location_mode,omitempty"` + MaxRestoreBytesPerSec ByteSize `json:"max_restore_bytes_per_sec,omitempty"` + MaxSnapshotBytesPerSec ByteSize `json:"max_snapshot_bytes_per_sec,omitempty"` + Readonly *bool `json:"readonly,omitempty"` +} + +func (s *AzureRepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "base_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "BasePath", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.BasePath = &o + + case "chunk_size": + if err := dec.Decode(&s.ChunkSize); err != nil { + return fmt.Errorf("%s | %w", "ChunkSize", err) + } + + case "client": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Client", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Client = &o + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Compress", err) + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "container": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Container", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Container = &o + + case "location_mode": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "LocationMode", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.LocationMode = &o + + case "max_restore_bytes_per_sec": + if err := dec.Decode(&s.MaxRestoreBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxRestoreBytesPerSec", err) + } + + case "max_snapshot_bytes_per_sec": + if err := dec.Decode(&s.MaxSnapshotBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxSnapshotBytesPerSec", err) + } + + case "readonly": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Readonly", err) + } + s.Readonly = &value + case bool: + s.Readonly = &v + } + + } + } + return nil +} + +// NewAzureRepositorySettings returns a AzureRepositorySettings. +func NewAzureRepositorySettings() *AzureRepositorySettings { + r := &AzureRepositorySettings{} + + return r +} diff --git a/typedapi/types/base.go b/typedapi/types/base.go index 2572451e2f..ef115fa3f7 100644 --- a/typedapi/types/base.go +++ b/typedapi/types/base.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
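The generated decoders above (for example in AzureRepositorySettings and, just below, in Base) accept booleans that arrive either as JSON booleans or as quoted strings, and they now wrap any parse failure with the struct field name. The following is a minimal standalone sketch of that decoding pattern; the `settings` type and the `Compress` field are illustrative stand-ins, not part of the client.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// settings mimics generated types such as AzureRepositorySettings or Base:
// a boolean field may be sent as a JSON bool or as a quoted string ("true").
type settings struct {
	Compress *bool
}

func (s *settings) UnmarshalJSON(data []byte) error {
	var raw map[string]interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["compress"].(type) {
	case string:
		value, err := strconv.ParseBool(v)
		if err != nil {
			// Mirror the generated code: prefix the error with the field name.
			return fmt.Errorf("%s | %w", "Compress", err)
		}
		s.Compress = &value
	case bool:
		s.Compress = &v
	}
	return nil
}

func main() {
	var a, b settings
	_ = json.Unmarshal([]byte(`{"compress": true}`), &a)
	_ = json.Unmarshal([]byte(`{"compress": "true"}`), &b)
	fmt.Println(*a.Compress, *b.Compress) // true true
}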
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Base type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L30-L33 type Base struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -58,7 +59,7 @@ func (s *Base) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -72,7 +73,7 @@ func (s *Base) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/baseindicator.go b/typedapi/types/baseindicator.go index fc3b1979bc..b929216396 100644 --- a/typedapi/types/baseindicator.go +++ b/typedapi/types/baseindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // BaseIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L42-L47 type BaseIndicator struct { Diagnosis []Diagnosis `json:"diagnosis,omitempty"` Impacts []Impact `json:"impacts,omitempty"` @@ -57,23 +58,23 @@ func (s *BaseIndicator) UnmarshalJSON(data []byte) error { case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/basenode.go b/typedapi/types/basenode.go index 464e8a3c58..72387bbf76 100644 --- a/typedapi/types/basenode.go +++ b/typedapi/types/basenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" @@ -31,7 +32,7 @@ import ( // BaseNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/BaseNode.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/BaseNode.ts#L25-L32 type BaseNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` @@ -61,32 +62,32 @@ func (s *BaseNode) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/binaryproperty.go b/typedapi/types/binaryproperty.go index 3c834e234f..e6f0a66d04 100644 --- a/typedapi/types/binaryproperty.go +++ b/typedapi/types/binaryproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // BinaryProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L49-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L49-L51 type BinaryProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -68,13 +69,13 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -85,7 +86,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -94,7 +95,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -412,7 +413,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -425,7 +426,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -738,7 +739,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -754,7 +755,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -763,7 +764,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/booleanproperty.go b/typedapi/types/booleanproperty.go index 5c145b3410..66107b969d 100644 --- a/typedapi/types/booleanproperty.go +++ b/typedapi/types/booleanproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // BooleanProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L53-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L53-L59 type BooleanProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -73,7 +74,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -88,13 +89,13 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -105,7 +106,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -114,12 +115,12 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fielddata": if err := dec.Decode(&s.Fielddata); err != nil { - return err + return fmt.Errorf("%s | %w", "Fielddata", err) } case "fields": @@ -437,7 +438,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -452,7 +453,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -464,7 +465,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -474,7 +475,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } s.NullValue = &value case bool: @@ -791,7 +792,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -807,7 +808,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -816,7 +817,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/boolquery.go b/typedapi/types/boolquery.go index c21eea3e2d..e5061b2a84 100644 --- a/typedapi/types/boolquery.go +++ 
b/typedapi/types/boolquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BoolQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L28-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L28-L52 type BoolQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -77,7 +78,7 @@ func (s *BoolQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -92,19 +93,19 @@ func (s *BoolQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } s.Filter = append(s.Filter, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } } case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "must": @@ -113,13 +114,13 @@ func (s *BoolQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Must", err) } s.Must = append(s.Must, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Must); err != nil { - return err + return fmt.Errorf("%s | %w", "Must", err) } } @@ -129,20 +130,20 @@ func (s *BoolQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MustNot", err) } s.MustNot = append(s.MustNot, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MustNot); err != nil { - return err + return fmt.Errorf("%s | %w", "MustNot", err) } } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -157,13 +158,13 @@ func (s *BoolQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Should", err) } s.Should = append(s.Should, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Should); err != nil { - return err + return fmt.Errorf("%s | %w", "Should", err) } } diff --git a/typedapi/types/boostingquery.go b/typedapi/types/boostingquery.go index 22edec07a3..8d894bdc35 100644 --- a/typedapi/types/boostingquery.go +++ b/typedapi/types/boostingquery.go @@ -16,7 +16,7 @@ // 
under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BoostingQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L54-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L54-L67 type BoostingQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -70,7 +71,7 @@ func (s *BoostingQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -81,7 +82,7 @@ func (s *BoostingQuery) UnmarshalJSON(data []byte) error { case "negative": if err := dec.Decode(&s.Negative); err != nil { - return err + return fmt.Errorf("%s | %w", "Negative", err) } case "negative_boost": @@ -91,7 +92,7 @@ func (s *BoostingQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NegativeBoost", err) } f := Float64(value) s.NegativeBoost = f @@ -102,13 +103,13 @@ func (s *BoostingQuery) UnmarshalJSON(data []byte) error { case "positive": if err := dec.Decode(&s.Positive); err != nil { - return err + return fmt.Errorf("%s | %w", "Positive", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/boxplotaggregate.go b/typedapi/types/boxplotaggregate.go index 9088fc872b..689a060d1c 100644 --- a/typedapi/types/boxplotaggregate.go +++ b/typedapi/types/boxplotaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BoxPlotAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L706-L722 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L706-L722 type BoxPlotAggregate struct { Lower Float64 `json:"lower"` LowerAsString *string `json:"lower_as_string,omitempty"` @@ -71,7 +72,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lower", err) } f := Float64(value) s.Lower = f @@ -83,7 +84,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "lower_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LowerAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,7 +100,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) s.Max = f @@ -111,7 +112,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "max_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -122,7 +123,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min": @@ -132,7 +133,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) s.Min = f @@ -144,7 +145,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "min_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -160,7 +161,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Q1", err) } f := Float64(value) s.Q1 = f @@ -172,7 +173,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "q1_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Q1AsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,7 +189,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Q2", err) } f := Float64(value) s.Q2 = f @@ -200,7 +201,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "q2_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Q2AsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -216,7 +217,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Q3", err) } f := Float64(value) s.Q3 = f @@ -228,7 +229,7 @@ func (s *BoxPlotAggregate) 
UnmarshalJSON(data []byte) error { case "q3_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Q3AsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -244,7 +245,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Upper", err) } f := Float64(value) s.Upper = f @@ -256,7 +257,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { case "upper_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UpperAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/boxplotaggregation.go b/typedapi/types/boxplotaggregation.go index 6fc97bcb16..568a10fcf2 100644 --- a/typedapi/types/boxplotaggregation.go +++ b/typedapi/types/boxplotaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BoxplotAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L57-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L57-L62 type BoxplotAggregation struct { // Compression Limits the maximum number of nodes used by the underlying TDigest algorithm // to `20 * compression`, enabling control of memory usage and approximation @@ -66,7 +67,7 @@ func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compression", err) } f := Float64(value) s.Compression = &f @@ -77,18 +78,18 @@ func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -97,7 +98,7 @@ func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -106,7 +107,7 @@ func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -114,7 +115,7 @@ func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/breaker.go b/typedapi/types/breaker.go index 
df550fefea..4223e5b2bb 100644 --- a/typedapi/types/breaker.go +++ b/typedapi/types/breaker.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Breaker type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L434-L459 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L434-L459 type Breaker struct { // EstimatedSize Estimated memory used for the operation. EstimatedSize *string `json:"estimated_size,omitempty"` @@ -66,7 +67,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { case "estimated_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "EstimatedSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EstimatedSizeInBytes", err) } s.EstimatedSizeInBytes = &value case float64: @@ -93,7 +94,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { case "limit_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LimitSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LimitSizeInBytes", err) } s.LimitSizeInBytes = &value case float64: @@ -124,7 +125,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Overhead", err) } f := float32(value) s.Overhead = &f @@ -140,7 +141,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Tripped", err) } f := float32(value) s.Tripped = &f diff --git a/typedapi/types/bucketcorrelationaggregation.go b/typedapi/types/bucketcorrelationaggregation.go index 7018086d4f..aba2641e11 100644 --- a/typedapi/types/bucketcorrelationaggregation.go +++ b/typedapi/types/bucketcorrelationaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketCorrelationAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L129-L135 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L129-L135 type BucketCorrelationAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -57,23 +58,23 @@ func (s *BucketCorrelationAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "function": if err := dec.Decode(&s.Function); err != nil { - return err + return fmt.Errorf("%s | %w", "Function", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/bucketcorrelationfunction.go b/typedapi/types/bucketcorrelationfunction.go index 56fddca1ad..9dd6439cbe 100644 --- a/typedapi/types/bucketcorrelationfunction.go +++ b/typedapi/types/bucketcorrelationfunction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // BucketCorrelationFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L137-L142 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L137-L142 type BucketCorrelationFunction struct { // CountCorrelation The configuration to calculate a count correlation. This function is designed // for determining the correlation of a term value and a given metric. diff --git a/typedapi/types/bucketcorrelationfunctioncountcorrelation.go b/typedapi/types/bucketcorrelationfunctioncountcorrelation.go index e5da885b79..42cc9453d9 100644 --- a/typedapi/types/bucketcorrelationfunctioncountcorrelation.go +++ b/typedapi/types/bucketcorrelationfunctioncountcorrelation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // BucketCorrelationFunctionCountCorrelation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L144-L147 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L144-L147 type BucketCorrelationFunctionCountCorrelation struct { // Indicator The indicator with which to correlate the configured `bucket_path` values. 
Indicator BucketCorrelationFunctionCountCorrelationIndicator `json:"indicator"` diff --git a/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go b/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go index 701e6c25d2..81478b8533 100644 --- a/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go +++ b/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketCorrelationFunctionCountCorrelationIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L149-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L149-L167 type BucketCorrelationFunctionCountCorrelationIndicator struct { // DocCount The total number of documents that initially created the expectations. It’s // required to be greater @@ -74,7 +75,7 @@ func (s *BucketCorrelationFunctionCountCorrelationIndicator) UnmarshalJSON(data case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -84,12 +85,12 @@ func (s *BucketCorrelationFunctionCountCorrelationIndicator) UnmarshalJSON(data case "expectations": if err := dec.Decode(&s.Expectations); err != nil { - return err + return fmt.Errorf("%s | %w", "Expectations", err) } case "fractions": if err := dec.Decode(&s.Fractions); err != nil { - return err + return fmt.Errorf("%s | %w", "Fractions", err) } } diff --git a/typedapi/types/bucketinfluencer.go b/typedapi/types/bucketinfluencer.go index 5de3fced6a..ca3d00071b 100644 --- a/typedapi/types/bucketinfluencer.go +++ b/typedapi/types/bucketinfluencer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketInfluencer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Bucket.ts#L80-L128 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Bucket.ts#L80-L128 type BucketInfluencer struct { // AnomalyScore A normalized score between 0-100, which is calculated for each bucket // influencer. 
This score might be updated as @@ -88,7 +89,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyScore", err) } f := Float64(value) s.AnomalyScore = f @@ -99,12 +100,12 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "influencer_field_name": if err := dec.Decode(&s.InfluencerFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "InfluencerFieldName", err) } case "initial_anomaly_score": @@ -114,7 +115,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitialAnomalyScore", err) } f := Float64(value) s.InitialAnomalyScore = f @@ -130,7 +131,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsInterim", err) } s.IsInterim = value case bool: @@ -139,7 +140,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "probability": @@ -149,7 +150,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Probability", err) } f := Float64(value) s.Probability = f @@ -165,7 +166,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RawAnomalyScore", err) } f := Float64(value) s.RawAnomalyScore = f @@ -177,7 +178,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case "result_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,12 +189,12 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "timestamp_string": if err := dec.Decode(&s.TimestampString); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampString", err) } } diff --git a/typedapi/types/bucketksaggregation.go b/typedapi/types/bucketksaggregation.go index 1ea71e302b..7453795a10 100644 --- a/typedapi/types/bucketksaggregation.go +++ b/typedapi/types/bucketksaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketKsAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L94-L127 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L94-L127 type BucketKsAggregation struct { // Alternative A list of string values indicating which K-S test alternative to calculate. // The valid values @@ -80,28 +81,28 @@ func (s *BucketKsAggregation) UnmarshalJSON(data []byte) error { case "alternative": if err := dec.Decode(&s.Alternative); err != nil { - return err + return fmt.Errorf("%s | %w", "Alternative", err) } case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "fractions": if err := dec.Decode(&s.Fractions); err != nil { - return err + return fmt.Errorf("%s | %w", "Fractions", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +114,7 @@ func (s *BucketKsAggregation) UnmarshalJSON(data []byte) error { case "sampling_method": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SamplingMethod", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/bucketmetricvalueaggregate.go b/typedapi/types/bucketmetricvalueaggregate.go index 47c97672dd..4961fb6092 100644 --- a/typedapi/types/bucketmetricvalueaggregate.go +++ b/typedapi/types/bucketmetricvalueaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketMetricValueAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L233-L236 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L233-L236 type BucketMetricValueAggregate struct { Keys []string `json:"keys"` Meta Metadata `json:"meta,omitempty"` @@ -58,23 +59,23 @@ func (s *BucketMetricValueAggregate) UnmarshalJSON(data []byte) error { case "keys": if err := dec.Decode(&s.Keys); err != nil { - return err + return fmt.Errorf("%s | %w", "Keys", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/bucketpathaggregation.go b/typedapi/types/bucketpathaggregation.go index c25c1f3f4c..0eba488412 100644 --- a/typedapi/types/bucketpathaggregation.go +++ b/typedapi/types/bucketpathaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketPathAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L31-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L31-L37 type BucketPathAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -55,18 +56,18 @@ func (s *BucketPathAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/bucketsadjacencymatrixbucket.go b/typedapi/types/bucketsadjacencymatrixbucket.go index 5b3005d53d..24765fcb5f 100644 --- a/typedapi/types/bucketsadjacencymatrixbucket.go +++ b/typedapi/types/bucketsadjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]AdjacencyMatrixBucket // []AdjacencyMatrixBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsAdjacencyMatrixBucket interface{} diff --git a/typedapi/types/bucketsapikeyquerycontainer.go b/typedapi/types/bucketsapikeyquerycontainer.go new file mode 100644 index 0000000000..a58c057838 --- /dev/null +++ b/typedapi/types/bucketsapikeyquerycontainer.go @@ -0,0 +1,29 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +// BucketsAPIKeyQueryContainer holds the union for the following types: +// +// map[string]APIKeyQueryContainer +// []APIKeyQueryContainer +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsAPIKeyQueryContainer interface{} diff --git a/typedapi/types/bucketscompositebucket.go b/typedapi/types/bucketscompositebucket.go index b30d354d84..4ae016a43c 100644 --- a/typedapi/types/bucketscompositebucket.go +++ b/typedapi/types/bucketscompositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]CompositeBucket // []CompositeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsCompositeBucket interface{} diff --git a/typedapi/types/bucketscriptaggregation.go b/typedapi/types/bucketscriptaggregation.go index e71a813887..c5bfb08dda 100644 --- a/typedapi/types/bucketscriptaggregation.go +++ b/typedapi/types/bucketscriptaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // BucketScriptAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L80-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L80-L85 type BucketScriptAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -65,13 +66,13 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,18 +83,18 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -114,7 +115,7 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -123,7 +124,7 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -131,7 +132,7 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/bucketsdatehistogrambucket.go b/typedapi/types/bucketsdatehistogrambucket.go index 0445f9b898..c0efaf660e 100644 --- a/typedapi/types/bucketsdatehistogrambucket.go +++ b/typedapi/types/bucketsdatehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]DateHistogramBucket // []DateHistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsDateHistogramBucket interface{} diff --git a/typedapi/types/bucketsdoubletermsbucket.go b/typedapi/types/bucketsdoubletermsbucket.go index c4530d53bb..c5b6b33371 100644 --- a/typedapi/types/bucketsdoubletermsbucket.go +++ b/typedapi/types/bucketsdoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]DoubleTermsBucket // []DoubleTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsDoubleTermsBucket interface{} diff --git a/typedapi/types/bucketselectoraggregation.go b/typedapi/types/bucketselectoraggregation.go index 97b2f313e3..52d8384bba 100644 --- a/typedapi/types/bucketselectoraggregation.go +++ b/typedapi/types/bucketselectoraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // BucketSelectorAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L87-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L87-L92 type BucketSelectorAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -65,13 +66,13 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,18 +83,18 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -114,7 +115,7 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -123,7 +124,7 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -131,7 +132,7 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/bucketsfiltersbucket.go b/typedapi/types/bucketsfiltersbucket.go index 6141c67341..c2fb252ecf 100644 --- a/typedapi/types/bucketsfiltersbucket.go +++ b/typedapi/types/bucketsfiltersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]FiltersBucket // []FiltersBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsFiltersBucket interface{} diff --git a/typedapi/types/bucketsfrequentitemsetsbucket.go b/typedapi/types/bucketsfrequentitemsetsbucket.go index 358284a6b5..148d9121ed 100644 --- a/typedapi/types/bucketsfrequentitemsetsbucket.go +++ b/typedapi/types/bucketsfrequentitemsetsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
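Editor's note (illustration only): the script handling above exists because the script field on these pipeline aggregations is a two-variant union, so the decoder buffers the raw message and retries it as either an inline script or a stored-script reference. Below is a hedged request-side sketch of the two shapes, using type names visible in this diff; I am assuming the usual generated field names Source and Id on the two variants, and the field values are made up.

package main

import "github.com/elastic/go-elasticsearch/v8/typedapi/types"

func main() {
	// Inline variant: the script body travels with the aggregation.
	inline := types.BucketSelectorAggregation{
		// BucketsPath is itself a union (string, []string or map[string]string);
		// the keyed map form is used here.
		BucketsPath: map[string]string{"total": "total_sales"},
		Script:      types.InlineScript{Source: "params.total > 1000"},
	}

	// Stored variant: only the id of a previously stored script is sent.
	stored := types.BucketSelectorAggregation{
		BucketsPath: map[string]string{"total": "total_sales"},
		Script:      types.StoredScriptId{Id: "keep-large-buckets"},
	}

	_, _ = inline, stored
}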
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]FrequentItemSetsBucket // []FrequentItemSetsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsFrequentItemSetsBucket interface{} diff --git a/typedapi/types/bucketsgeohashgridbucket.go b/typedapi/types/bucketsgeohashgridbucket.go index 6fe7069cd3..693bc0b95c 100644 --- a/typedapi/types/bucketsgeohashgridbucket.go +++ b/typedapi/types/bucketsgeohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]GeoHashGridBucket // []GeoHashGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsGeoHashGridBucket interface{} diff --git a/typedapi/types/bucketsgeohexgridbucket.go b/typedapi/types/bucketsgeohexgridbucket.go index bd34f5bae1..e91594ccf3 100644 --- a/typedapi/types/bucketsgeohexgridbucket.go +++ b/typedapi/types/bucketsgeohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]GeoHexGridBucket // []GeoHexGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsGeoHexGridBucket interface{} diff --git a/typedapi/types/bucketsgeotilegridbucket.go b/typedapi/types/bucketsgeotilegridbucket.go index 764feabbf3..923bda354f 100644 --- a/typedapi/types/bucketsgeotilegridbucket.go +++ b/typedapi/types/bucketsgeotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]GeoTileGridBucket // []GeoTileGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsGeoTileGridBucket interface{} diff --git a/typedapi/types/bucketshistogrambucket.go b/typedapi/types/bucketshistogrambucket.go index a71a59db18..fc4369e88f 100644 --- a/typedapi/types/bucketshistogrambucket.go +++ b/typedapi/types/bucketshistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]HistogramBucket // []HistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsHistogramBucket interface{} diff --git a/typedapi/types/bucketsipprefixbucket.go b/typedapi/types/bucketsipprefixbucket.go index 99272867d2..19012cdcf6 100644 --- a/typedapi/types/bucketsipprefixbucket.go +++ b/typedapi/types/bucketsipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]IpPrefixBucket // []IpPrefixBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsIpPrefixBucket interface{} diff --git a/typedapi/types/bucketsiprangebucket.go b/typedapi/types/bucketsiprangebucket.go index 4c67f0642f..795490f6ea 100644 --- a/typedapi/types/bucketsiprangebucket.go +++ b/typedapi/types/bucketsiprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
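Editor's note: the bucket unions in this file set (BucketsHistogramBucket, BucketsIpRangeBucket and their siblings) are declared as empty interfaces because, per their doc comments, the concrete value is either a keyed map or a plain slice of the bucket type. A sketch of how a consumer might branch on one of them, assuming the decoded value really is one of the two documented shapes; the helper name is hypothetical and the import path is the usual go-elasticsearch v8 module.

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// eachHistogramBucket walks a BucketsHistogramBucket union value, which per
// its doc comment holds either map[string]types.HistogramBucket or
// []types.HistogramBucket.
func eachHistogramBucket(b types.BucketsHistogramBucket, fn func(key string, bucket types.HistogramBucket)) {
	switch v := b.(type) {
	case []types.HistogramBucket:
		for _, bucket := range v {
			fn("", bucket) // unkeyed (array) form
		}
	case map[string]types.HistogramBucket:
		for key, bucket := range v {
			fn(key, bucket) // keyed form
		}
	}
}

func main() {
	// In practice the value comes from a decoded aggregation response.
	var buckets types.BucketsHistogramBucket = []types.HistogramBucket{}
	eachHistogramBucket(buckets, func(key string, bucket types.HistogramBucket) {
		fmt.Printf("%s %+v\n", key, bucket)
	})
}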
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]IpRangeBucket // []IpRangeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsIpRangeBucket interface{} diff --git a/typedapi/types/bucketslongraretermsbucket.go b/typedapi/types/bucketslongraretermsbucket.go index 9dd068c602..169bee8735 100644 --- a/typedapi/types/bucketslongraretermsbucket.go +++ b/typedapi/types/bucketslongraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]LongRareTermsBucket // []LongRareTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsLongRareTermsBucket interface{} diff --git a/typedapi/types/bucketslongtermsbucket.go b/typedapi/types/bucketslongtermsbucket.go index d4008f6c52..4698d7e6f5 100644 --- a/typedapi/types/bucketslongtermsbucket.go +++ b/typedapi/types/bucketslongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]LongTermsBucket // []LongTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsLongTermsBucket interface{} diff --git a/typedapi/types/bucketsmultitermsbucket.go b/typedapi/types/bucketsmultitermsbucket.go index f19fadaa7f..cce056de19 100644 --- a/typedapi/types/bucketsmultitermsbucket.go +++ b/typedapi/types/bucketsmultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]MultiTermsBucket // []MultiTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsMultiTermsBucket interface{} diff --git a/typedapi/types/bucketsortaggregation.go b/typedapi/types/bucketsortaggregation.go index f49fe909fb..4003e9fca6 100644 --- a/typedapi/types/bucketsortaggregation.go +++ b/typedapi/types/bucketsortaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // BucketSortAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L169-L190 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L169-L190 type BucketSortAggregation struct { // From Buckets in positions prior to `from` will be truncated. From *int `json:"from,omitempty"` @@ -70,7 +71,7 @@ func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -80,18 +81,18 @@ func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -108,7 +109,7 @@ func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -122,13 +123,13 @@ func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } diff --git a/typedapi/types/bucketspath.go b/typedapi/types/bucketspath.go index 87f279cb04..5b21311e20 100644 --- a/typedapi/types/bucketspath.go +++ b/typedapi/types/bucketspath.go @@ -16,7 +16,7 @@ // under the License. 
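Editor's note: several decoders touched here accept either a single value or an array for the same field; the sort handling in BucketSortAggregation above and the copy_to handling further down both peek at the raw message for a leading "[" before deciding how to decode. The generated code inlines that check per field; below is a generic, hypothetical restatement of the same technique.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// decodeOneOrMany appends to dst whether raw holds a single T or a []T,
// mirroring the HasPrefix("[") check used by the generated decoders.
func decodeOneOrMany[T any](field string, raw json.RawMessage, dst *[]T) error {
	if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
		var one T
		if err := json.Unmarshal(raw, &one); err != nil {
			return fmt.Errorf("%s | %w", field, err)
		}
		*dst = append(*dst, one)
		return nil
	}
	var many []T
	if err := json.Unmarshal(raw, &many); err != nil {
		return fmt.Errorf("%s | %w", field, err)
	}
	*dst = append(*dst, many...)
	return nil
}

func main() {
	var copyTo []string
	_ = decodeOneOrMany("CopyTo", json.RawMessage(`"title"`), &copyTo)
	_ = decodeOneOrMany("CopyTo", json.RawMessage(`["body","tags"]`), &copyTo)
	fmt.Println(copyTo) // [title body tags]
}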
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ package types // []string // map[string]string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L53-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L53-L59 type BucketsPath interface{} diff --git a/typedapi/types/bucketsquery.go b/typedapi/types/bucketsquery.go index afddca8c32..9a45394604 100644 --- a/typedapi/types/bucketsquery.go +++ b/typedapi/types/bucketsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]Query // []Query // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsQuery interface{} diff --git a/typedapi/types/bucketsrangebucket.go b/typedapi/types/bucketsrangebucket.go index 9d0c232a73..b7d28913af 100644 --- a/typedapi/types/bucketsrangebucket.go +++ b/typedapi/types/bucketsrangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]RangeBucket // []RangeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsRangeBucket interface{} diff --git a/typedapi/types/bucketssignificantlongtermsbucket.go b/typedapi/types/bucketssignificantlongtermsbucket.go index a43e87dfc0..4cab4adb9e 100644 --- a/typedapi/types/bucketssignificantlongtermsbucket.go +++ b/typedapi/types/bucketssignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]SignificantLongTermsBucket // []SignificantLongTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsSignificantLongTermsBucket interface{} diff --git a/typedapi/types/bucketssignificantstringtermsbucket.go b/typedapi/types/bucketssignificantstringtermsbucket.go index e97aa1fa20..540558ff03 100644 --- a/typedapi/types/bucketssignificantstringtermsbucket.go +++ b/typedapi/types/bucketssignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]SignificantStringTermsBucket // []SignificantStringTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsSignificantStringTermsBucket interface{} diff --git a/typedapi/types/bucketsstringraretermsbucket.go b/typedapi/types/bucketsstringraretermsbucket.go index ed7bffbc34..f3613ba609 100644 --- a/typedapi/types/bucketsstringraretermsbucket.go +++ b/typedapi/types/bucketsstringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]StringRareTermsBucket // []StringRareTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsStringRareTermsBucket interface{} diff --git a/typedapi/types/bucketsstringtermsbucket.go b/typedapi/types/bucketsstringtermsbucket.go index cb7c7a07e7..1187a88528 100644 --- a/typedapi/types/bucketsstringtermsbucket.go +++ b/typedapi/types/bucketsstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]StringTermsBucket // []StringTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsStringTermsBucket interface{} diff --git a/typedapi/types/bucketsummary.go b/typedapi/types/bucketsummary.go index 31eb109091..86b579d659 100644 --- a/typedapi/types/bucketsummary.go +++ b/typedapi/types/bucketsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BucketSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Bucket.ts#L31-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Bucket.ts#L31-L78 type BucketSummary struct { // AnomalyScore The maximum anomaly score, between 0-100, for any of the bucket influencers. // This is an overall, rate-limited @@ -90,7 +91,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyScore", err) } f := Float64(value) s.AnomalyScore = f @@ -101,12 +102,12 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case "bucket_influencers": if err := dec.Decode(&s.BucketInfluencers); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketInfluencers", err) } case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "event_count": @@ -116,7 +117,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EventCount", err) } s.EventCount = value case float64: @@ -131,7 +132,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitialAnomalyScore", err) } f := Float64(value) s.InitialAnomalyScore = f @@ -147,7 +148,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsInterim", err) } s.IsInterim = value case bool: @@ -156,18 +157,18 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "processing_time_ms": if err := dec.Decode(&s.ProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTimeMs", err) } case "result_type": var tmp 
json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -178,12 +179,12 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "timestamp_string": if err := dec.Decode(&s.TimestampString); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampString", err) } } diff --git a/typedapi/types/bucketsvariablewidthhistogrambucket.go b/typedapi/types/bucketsvariablewidthhistogrambucket.go index b44ab5b578..60f0c1b388 100644 --- a/typedapi/types/bucketsvariablewidthhistogrambucket.go +++ b/typedapi/types/bucketsvariablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]VariableWidthHistogramBucket // []VariableWidthHistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsVariableWidthHistogramBucket interface{} diff --git a/typedapi/types/bucketsvoid.go b/typedapi/types/bucketsvoid.go index 072a5f6735..be77f39416 100644 --- a/typedapi/types/bucketsvoid.go +++ b/typedapi/types/bucketsvoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // map[string]interface{} // []interface{} // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L316-L325 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsVoid interface{} diff --git a/typedapi/types/buildinformation.go b/typedapi/types/buildinformation.go index 5dbe8aaebd..4ff5f33f5a 100644 --- a/typedapi/types/buildinformation.go +++ b/typedapi/types/buildinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BuildInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/info/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/info/types.ts#L24-L27 type BuildInformation struct { Date DateTime `json:"date"` Hash string `json:"hash"` @@ -53,13 +54,13 @@ func (s *BuildInformation) UnmarshalJSON(data []byte) error { case "date": if err := dec.Decode(&s.Date); err != nil { - return err + return fmt.Errorf("%s | %w", "Date", err) } case "hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Hash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/bulkindexbyscrollfailure.go b/typedapi/types/bulkindexbyscrollfailure.go index d76125b1a5..f09aa43d66 100644 --- a/typedapi/types/bulkindexbyscrollfailure.go +++ b/typedapi/types/bulkindexbyscrollfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BulkIndexByScrollFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Errors.ts#L58-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Errors.ts#L58-L64 type BulkIndexByScrollFailure struct { Cause ErrorCause `json:"cause"` Id string `json:"id"` @@ -56,17 +57,17 @@ func (s *BulkIndexByScrollFailure) UnmarshalJSON(data []byte) error { case "cause": if err := dec.Decode(&s.Cause); err != nil { - return err + return fmt.Errorf("%s | %w", "Cause", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "status": @@ -77,7 +78,7 @@ func (s *BulkIndexByScrollFailure) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } s.Status = value case float64: @@ -88,7 +89,7 @@ func (s *BulkIndexByScrollFailure) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/bulkstats.go b/typedapi/types/bulkstats.go index 5897c001b2..d03ed38f0e 100644 --- a/typedapi/types/bulkstats.go +++ b/typedapi/types/bulkstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BulkStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L68-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L68-L78 type BulkStats struct { AvgSize ByteSize `json:"avg_size,omitempty"` AvgSizeInBytes int64 `json:"avg_size_in_bytes"` @@ -60,7 +61,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case "avg_size": if err := dec.Decode(&s.AvgSize); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgSize", err) } case "avg_size_in_bytes": @@ -70,7 +71,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvgSizeInBytes", err) } s.AvgSizeInBytes = value case float64: @@ -80,12 +81,12 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case "avg_time": if err := dec.Decode(&s.AvgTime); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgTime", err) } case "avg_time_in_millis": if err := dec.Decode(&s.AvgTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgTimeInMillis", err) } case "total_operations": @@ -95,7 +96,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalOperations", err) } s.TotalOperations = value case float64: @@ -105,7 +106,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case "total_size": if err := dec.Decode(&s.TotalSize); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSize", err) } case "total_size_in_bytes": @@ -115,7 +116,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSizeInBytes", err) } s.TotalSizeInBytes = value case float64: @@ -125,12 +126,12 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/bytenumberproperty.go b/typedapi/types/bytenumberproperty.go index 3c12d19431..138354d3a8 100644 --- a/typedapi/types/bytenumberproperty.go +++ b/typedapi/types/bytenumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // ByteNumberProperty type. 
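Editor's note: the numeric and boolean cases above (Status, AvgSizeInBytes, TotalOperations and the various strconv.Parse* calls) first decode into an untyped value and then branch on whether the JSON carried a native number or a quoted string, so both forms are accepted. A compact, hypothetical standalone version of that handling:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type docCount struct {
	Count int64
}

func (d *docCount) UnmarshalJSON(data []byte) error {
	var raw interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return fmt.Errorf("%s | %w", "Count", err)
	}
	switch v := raw.(type) {
	case string: // quoted form, e.g. "42"
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return fmt.Errorf("%s | %w", "Count", err)
		}
		d.Count = n
	case float64: // native JSON numbers arrive as float64 from encoding/json
		d.Count = int64(v)
	default:
		return fmt.Errorf("Count | unexpected JSON type %T", raw)
	}
	return nil
}

func main() {
	var a, b docCount
	_ = json.Unmarshal([]byte(`42`), &a)
	_ = json.Unmarshal([]byte(`"42"`), &b)
	fmt.Println(a.Count, b.Count) // 42 42
}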
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L164-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L164-L167 type ByteNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,17 +499,17 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -821,7 +822,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -830,7 +831,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | 
%w", "Script", err) } switch t { @@ -839,7 +840,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -847,7 +848,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -857,7 +858,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -873,7 +874,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -887,7 +888,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -896,12 +897,12 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/bytesize.go b/typedapi/types/bytesize.go index a833bf0a90..4f6b8737e6 100644 --- a/typedapi/types/bytesize.go +++ b/typedapi/types/bytesize.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L96-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L96-L97 type ByteSize interface{} diff --git a/typedapi/types/bytesprocessor.go b/typedapi/types/bytesprocessor.go index 7252d34154..e53108b1d9 100644 --- a/typedapi/types/bytesprocessor.go +++ b/typedapi/types/bytesprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // BytesProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L392-L408 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L392-L408 type BytesProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -72,7 +73,7 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,13 +84,13 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -119,7 +120,7 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -128,13 +129,13 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/cachequeries.go b/typedapi/types/cachequeries.go index 185e6a7fc9..550bdf49a5 100644 --- a/typedapi/types/cachequeries.go +++ b/typedapi/types/cachequeries.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CacheQueries type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L401-L403 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L405-L407 type CacheQueries struct { Enabled bool `json:"enabled"` } @@ -57,7 +58,7 @@ func (s *CacheQueries) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/cachestats.go b/typedapi/types/cachestats.go index c0669f96e1..11ff03dcec 100644 --- a/typedapi/types/cachestats.go +++ b/typedapi/types/cachestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/stats/types.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/stats/types.ts#L37-L43 type CacheStats struct { Count int `json:"count"` Evictions int `json:"evictions"` @@ -62,7 +63,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -78,7 +79,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = value case float64: @@ -94,7 +95,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } s.Hits = value case float64: @@ -110,7 +111,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Misses", err) } s.Misses = value case float64: @@ -120,7 +121,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } } diff --git a/typedapi/types/calendar.go b/typedapi/types/calendar.go index 9d0ccd48b0..e426121a81 100644 --- a/typedapi/types/calendar.go +++ b/typedapi/types/calendar.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Calendar type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_calendars/types.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_calendars/types.ts#L22-L29 type Calendar struct { // CalendarId A string that uniquely identifies a calendar. CalendarId string `json:"calendar_id"` @@ -57,13 +58,13 @@ func (s *Calendar) UnmarshalJSON(data []byte) error { case "calendar_id": if err := dec.Decode(&s.CalendarId); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarId", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *Calendar) UnmarshalJSON(data []byte) error { case "job_ids": if err := dec.Decode(&s.JobIds); err != nil { - return err + return fmt.Errorf("%s | %w", "JobIds", err) } } diff --git a/typedapi/types/calendarevent.go b/typedapi/types/calendarevent.go index 22828dc4fb..69f70f3a1c 100644 --- a/typedapi/types/calendarevent.go +++ b/typedapi/types/calendarevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CalendarEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/CalendarEvent.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/CalendarEvent.ts#L23-L33 type CalendarEvent struct { // CalendarId A string that uniquely identifies a calendar. CalendarId *string `json:"calendar_id,omitempty"` @@ -62,13 +63,13 @@ func (s *CalendarEvent) UnmarshalJSON(data []byte) error { case "calendar_id": if err := dec.Decode(&s.CalendarId); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarId", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,17 +80,17 @@ func (s *CalendarEvent) UnmarshalJSON(data []byte) error { case "end_time": if err := dec.Decode(&s.EndTime); err != nil { - return err + return fmt.Errorf("%s | %w", "EndTime", err) } case "event_id": if err := dec.Decode(&s.EventId); err != nil { - return err + return fmt.Errorf("%s | %w", "EventId", err) } case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } } diff --git a/typedapi/types/cardinalityaggregate.go b/typedapi/types/cardinalityaggregate.go index 8e9f2ed790..fb22d63dd1 100644 --- a/typedapi/types/cardinalityaggregate.go +++ b/typedapi/types/cardinalityaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CardinalityAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L138-L141 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L138-L141 type CardinalityAggregate struct { Meta Metadata `json:"meta,omitempty"` Value int64 `json:"value"` @@ -53,7 +54,7 @@ func (s *CardinalityAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": @@ -63,7 +64,7 @@ func (s *CardinalityAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } s.Value = value case float64: diff --git a/typedapi/types/cardinalityaggregation.go b/typedapi/types/cardinalityaggregation.go index 09247a2617..b5d893d3f7 100644 --- a/typedapi/types/cardinalityaggregation.go +++ b/typedapi/types/cardinalityaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CardinalityAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L87-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L87-L99 type CardinalityAggregation struct { // ExecutionHint Mechanism by which cardinality aggregations is run. 
ExecutionHint *cardinalityexecutionmode.CardinalityExecutionMode `json:"execution_hint,omitempty"` @@ -65,17 +66,17 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { case "execution_hint": if err := dec.Decode(&s.ExecutionHint); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionHint", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "precision_threshold": @@ -86,7 +87,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrecisionThreshold", err) } s.PrecisionThreshold = &value case float64: @@ -101,7 +102,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Rehash", err) } s.Rehash = &value case bool: @@ -111,7 +112,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -120,7 +121,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -129,7 +130,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -137,7 +138,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/catanonalydetectorcolumns.go b/typedapi/types/catanonalydetectorcolumns.go index b77ade50ad..cca9b96c7a 100644 --- a/typedapi/types/catanonalydetectorcolumns.go +++ b/typedapi/types/catanonalydetectorcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // CatAnonalyDetectorColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L402-L404 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L402-L404 type CatAnonalyDetectorColumns []catanomalydetectorcolumn.CatAnomalyDetectorColumn diff --git a/typedapi/types/catcomponenttemplate.go b/typedapi/types/catcomponenttemplate.go index 8257de7cb6..e7fc8d7af0 100644 --- a/typedapi/types/catcomponenttemplate.go +++ b/typedapi/types/catcomponenttemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CatComponentTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/component_templates/types.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/component_templates/types.ts#L20-L28 type CatComponentTemplate struct { AliasCount string `json:"alias_count"` IncludedIn string `json:"included_in"` @@ -59,7 +60,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "alias_count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AliasCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "included_in": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IncludedIn", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "mapping_count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MappingCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "metadata_count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MetadataCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -119,7 +120,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "settings_count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SettingsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *CatComponentTemplate) UnmarshalJSON(data []byte) error { case "version": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/catdatafeedcolumns.go b/typedapi/types/catdatafeedcolumns.go index 900529d911..8786ff9341 100644 --- a/typedapi/types/catdatafeedcolumns.go +++ b/typedapi/types/catdatafeedcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // CatDatafeedColumns type alias. 
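The cat fields above (alias_count, included_in, mapping_count, and so on) are modelled as Go strings but decoded via a json.RawMessage followed by strconv.Unquote; again only the error return changes. A small illustrative helper for the same idea:

package sketch

import (
	"encoding/json"
	"strconv"
)

// unquoteField shows how the generated cat decoders turn a raw JSON token
// (surrounding quotes included) into a plain Go string. strconv.Unquote
// strips the quotes and resolves escapes; it can reject tokens that are
// legal JSON but not legal Go literals (for example the escape \/), so a
// fallback to the raw token is kept here. The exact fallback taken by the
// generated code is elided from the hunks above, so treat this as an
// approximation.
func unquoteField(tmp json.RawMessage) string {
	o := string(tmp)
	if unquoted, err := strconv.Unquote(o); err == nil {
		return unquoted
	}
	return o
}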
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L559-L559 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L559-L559 type CatDatafeedColumns []catdatafeedcolumn.CatDatafeedColumn diff --git a/typedapi/types/catdfacolumns.go b/typedapi/types/catdfacolumns.go index 5190275358..158a41738c 100644 --- a/typedapi/types/catdfacolumns.go +++ b/typedapi/types/catdfacolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // CatDfaColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L558-L558 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L558-L558 type CatDfaColumns []catdfacolumn.CatDfaColumn diff --git a/typedapi/types/categorizationanalyzer.go b/typedapi/types/categorizationanalyzer.go index 7f6add2316..b55bd32127 100644 --- a/typedapi/types/categorizationanalyzer.go +++ b/typedapi/types/categorizationanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // CategorizationAnalyzerDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L181-L182 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L181-L182 type CategorizationAnalyzer interface{} diff --git a/typedapi/types/categorizationanalyzerdefinition.go b/typedapi/types/categorizationanalyzerdefinition.go index 6dfaafe03f..f370e85c0e 100644 --- a/typedapi/types/categorizationanalyzerdefinition.go +++ b/typedapi/types/categorizationanalyzerdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // CategorizationAnalyzerDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L184-L197 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L184-L197 type CategorizationAnalyzerDefinition struct { // CharFilter One or more character filters. In addition to the built-in character filters, // other plugins can provide more character filters. 
If this property is not @@ -132,7 +133,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) o := new(interface{}) if err := json.NewDecoder(source).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CharFilter", err) } s.CharFilter = append(s.CharFilter, *o) } @@ -448,7 +449,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) o := new(interface{}) if err := json.NewDecoder(source).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } s.Filter = append(s.Filter, *o) } @@ -560,7 +561,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { } default: if err := localDec.Decode(&s.Tokenizer); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } } diff --git a/typedapi/types/categorizetextaggregation.go b/typedapi/types/categorizetextaggregation.go index 6881c70f8a..7a05b3b9a0 100644 --- a/typedapi/types/categorizetextaggregation.go +++ b/typedapi/types/categorizetextaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CategorizeTextAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1037-L1101 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1037-L1101 type CategorizeTextAggregation struct { // CategorizationAnalyzer The categorization analyzer specifies how the text is analyzed and tokenized // before being categorized. 
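CategorizationAnalyzer, noted a few hunks up, is a union of a plain analyzer name (string) and a CategorizationAnalyzerDefinition, and the char_filter and filter entries decoded here can likewise be either names or definition objects. A simplified stand-in for decoding such a union (the generated code peeks at tokens and keys rather than raw bytes, but the idea is the same):

package sketch

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// analyzerDef is a placeholder for the library's
// CategorizationAnalyzerDefinition; only one field is kept for brevity.
type analyzerDef struct {
	Tokenizer interface{} `json:"tokenizer,omitempty"`
}

// decodeAnalyzerUnion shows one way to decode a string-or-object union such
// as CategorizationAnalyzer: look at the first non-space byte of the raw
// value and pick the variant. The error wrapping matches the pattern
// introduced in this diff.
func decodeAnalyzerUnion(raw json.RawMessage) (interface{}, error) {
	trimmed := bytes.TrimLeft(raw, " \t\r\n")
	if len(trimmed) == 0 {
		return nil, fmt.Errorf("%s | empty value", "CategorizationAnalyzer")
	}
	if trimmed[0] == '"' { // variant 1: a built-in analyzer name
		var name string
		if err := json.Unmarshal(raw, &name); err != nil {
			return nil, fmt.Errorf("%s | %w", "CategorizationAnalyzer", err)
		}
		return name, nil
	}
	// variant 2: an inline analyzer definition object
	var def analyzerDef
	if err := json.Unmarshal(raw, &def); err != nil {
		return nil, fmt.Errorf("%s | %w", "CategorizationAnalyzer", err)
	}
	return def, nil
}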
@@ -119,18 +120,18 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.CategorizationAnalyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationAnalyzer", err) } } case "categorization_filters": if err := dec.Decode(&s.CategorizationFilters); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationFilters", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "max_matched_tokens": @@ -141,7 +142,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxMatchedTokens", err) } s.MaxMatchedTokens = &value case float64: @@ -157,7 +158,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxUniqueTokens", err) } s.MaxUniqueTokens = &value case float64: @@ -167,7 +168,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -178,7 +179,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -189,7 +190,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -206,7 +207,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardMinDocCount", err) } s.ShardMinDocCount = &value case float64: @@ -222,7 +223,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -238,7 +239,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SimilarityThreshold", err) } s.SimilarityThreshold = &value case float64: @@ -254,7 +255,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/categorizetextanalyzer.go b/typedapi/types/categorizetextanalyzer.go index 6efd9b3e4f..139e25e762 100644 --- a/typedapi/types/categorizetextanalyzer.go +++ b/typedapi/types/categorizetextanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // CustomCategorizeTextAnalyzer // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1103-L1106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1103-L1106 type CategorizeTextAnalyzer interface{} diff --git a/typedapi/types/category.go b/typedapi/types/category.go index 50b482d229..4fb5acd6d7 100644 --- a/typedapi/types/category.go +++ b/typedapi/types/category.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Category type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Category.ts#L23-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Category.ts#L23-L49 type Category struct { // CategoryId A unique identifier for the category. category_id is unique at the job level, // even when per-partition categorization is enabled. @@ -93,18 +94,18 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "category_id": if err := dec.Decode(&s.CategoryId); err != nil { - return err + return fmt.Errorf("%s | %w", "CategoryId", err) } case "examples": if err := dec.Decode(&s.Examples); err != nil { - return err + return fmt.Errorf("%s | %w", "Examples", err) } case "grok_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GrokPattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,18 +116,18 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "max_matching_length": if err := dec.Decode(&s.MaxMatchingLength); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxMatchingLength", err) } case "mlcategory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Mlcategory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -142,7 +143,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumMatches", err) } s.NumMatches = &value case float64: @@ -153,7 +154,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "P", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -165,7 +166,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "partition_field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", 
"PartitionFieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -177,7 +178,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "partition_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,13 +189,13 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "preferred_to_categories": if err := dec.Decode(&s.PreferredToCategories); err != nil { - return err + return fmt.Errorf("%s | %w", "PreferredToCategories", err) } case "regex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Regex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -206,7 +207,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "result_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -218,7 +219,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { case "terms": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Terms", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/cattrainedmodelscolumns.go b/typedapi/types/cattrainedmodelscolumns.go index bcad264e97..62ead2201f 100644 --- a/typedapi/types/cattrainedmodelscolumns.go +++ b/typedapi/types/cattrainedmodelscolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // CatTrainedModelsColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L636-L638 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L636-L638 type CatTrainedModelsColumns []cattrainedmodelscolumn.CatTrainedModelsColumn diff --git a/typedapi/types/cattransformcolumns.go b/typedapi/types/cattransformcolumns.go index e314c2c2db..c3f09c08f1 100644 --- a/typedapi/types/cattransformcolumns.go +++ b/typedapi/types/cattransformcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // CatTransformColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L845-L845 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L845-L845 type CatTransformColumns []cattransformcolumn.CatTransformColumn diff --git a/typedapi/types/ccr.go b/typedapi/types/ccr.go index 81af9879e4..9ecf598f59 100644 --- a/typedapi/types/ccr.go +++ b/typedapi/types/ccr.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Ccr type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L334-L337 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L334-L337 type Ccr struct { AutoFollowPatternsCount int `json:"auto_follow_patterns_count"` Available bool `json:"available"` @@ -61,7 +62,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AutoFollowPatternsCount", err) } s.AutoFollowPatternsCount = value case float64: @@ -76,7 +77,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -90,7 +91,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -105,7 +106,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerIndicesCount", err) } s.FollowerIndicesCount = value case float64: diff --git a/typedapi/types/ccrshardstats.go b/typedapi/types/ccrshardstats.go index e9f77c8800..8b6cbc1b03 100644 --- a/typedapi/types/ccrshardstats.go +++ b/typedapi/types/ccrshardstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CcrShardStats type. 
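Counters and flags such as auto_follow_patterns_count and available above, and most fields of CcrShardStats below, are decoded through an empty interface and a type switch so that values serialized either as JSON numbers and booleans or as quoted strings are accepted; with this diff the wrap names the field whenever strconv fails. A standalone sketch of the integer branch (names are illustrative):

package sketch

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeFlexibleInt accepts either a plain JSON number or a quoted number,
// mirroring the string/float64 type switch used throughout these decoders.
// "FieldName" stands in for whichever field is being decoded.
func decodeFlexibleInt(raw json.RawMessage) (int, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return 0, err
	}
	switch v := v.(type) {
	case string: // e.g. "42"
		n, err := strconv.Atoi(v)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", "FieldName", err)
		}
		return n, nil
	case float64: // plain JSON number
		return int(v), nil
	default:
		return 0, fmt.Errorf("%s | unexpected JSON type %T", "FieldName", v)
	}
}

The strconv.ParseInt and strconv.ParseBool branches in the surrounding hunks follow the same shape.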
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/_types/FollowIndexStats.ts#L35-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/_types/FollowIndexStats.ts#L35-L69 type CcrShardStats struct { BytesRead int64 `json:"bytes_read"` FailedReadRequests int64 `json:"failed_read_requests"` @@ -89,7 +90,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BytesRead", err) } s.BytesRead = value case float64: @@ -104,7 +105,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedReadRequests", err) } s.FailedReadRequests = value case float64: @@ -119,7 +120,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedWriteRequests", err) } s.FailedWriteRequests = value case float64: @@ -129,12 +130,12 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "fatal_exception": if err := dec.Decode(&s.FatalException); err != nil { - return err + return fmt.Errorf("%s | %w", "FatalException", err) } case "follower_aliases_version": if err := dec.Decode(&s.FollowerAliasesVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerAliasesVersion", err) } case "follower_global_checkpoint": @@ -144,7 +145,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerGlobalCheckpoint", err) } s.FollowerGlobalCheckpoint = value case float64: @@ -155,7 +156,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "follower_index": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -166,22 +167,22 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "follower_mapping_version": if err := dec.Decode(&s.FollowerMappingVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerMappingVersion", err) } case "follower_max_seq_no": if err := dec.Decode(&s.FollowerMaxSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerMaxSeqNo", err) } case "follower_settings_version": if err := dec.Decode(&s.FollowerSettingsVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerSettingsVersion", err) } case "last_requested_seq_no": if err := dec.Decode(&s.LastRequestedSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "LastRequestedSeqNo", err) } case "leader_global_checkpoint": @@ -191,7 +192,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderGlobalCheckpoint", err) } s.LeaderGlobalCheckpoint = value case float64: @@ -202,7 +203,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "leader_index": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -213,7 
+214,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "leader_max_seq_no": if err := dec.Decode(&s.LeaderMaxSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderMaxSeqNo", err) } case "operations_read": @@ -223,7 +224,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OperationsRead", err) } s.OperationsRead = value case float64: @@ -238,7 +239,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OperationsWritten", err) } s.OperationsWritten = value case float64: @@ -254,7 +255,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutstandingReadRequests", err) } s.OutstandingReadRequests = value case float64: @@ -270,7 +271,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutstandingWriteRequests", err) } s.OutstandingWriteRequests = value case float64: @@ -280,13 +281,13 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "read_exceptions": if err := dec.Decode(&s.ReadExceptions); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadExceptions", err) } case "remote_cluster": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteCluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -303,7 +304,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardId", err) } s.ShardId = value case float64: @@ -318,7 +319,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SuccessfulReadRequests", err) } s.SuccessfulReadRequests = value case float64: @@ -333,7 +334,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SuccessfulWriteRequests", err) } s.SuccessfulWriteRequests = value case float64: @@ -343,42 +344,42 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "time_since_last_read": if err := dec.Decode(&s.TimeSinceLastRead); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSinceLastRead", err) } case "time_since_last_read_millis": if err := dec.Decode(&s.TimeSinceLastReadMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSinceLastReadMillis", err) } case "total_read_remote_exec_time": if err := dec.Decode(&s.TotalReadRemoteExecTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalReadRemoteExecTime", err) } case "total_read_remote_exec_time_millis": if err := dec.Decode(&s.TotalReadRemoteExecTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalReadRemoteExecTimeMillis", err) } case "total_read_time": if err := dec.Decode(&s.TotalReadTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalReadTime", err) } case "total_read_time_millis": if err := dec.Decode(&s.TotalReadTimeMillis); err != nil { - return err + return 
fmt.Errorf("%s | %w", "TotalReadTimeMillis", err) } case "total_write_time": if err := dec.Decode(&s.TotalWriteTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalWriteTime", err) } case "total_write_time_millis": if err := dec.Decode(&s.TotalWriteTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalWriteTimeMillis", err) } case "write_buffer_operation_count": @@ -388,7 +389,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "WriteBufferOperationCount", err) } s.WriteBufferOperationCount = value case float64: @@ -398,7 +399,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "write_buffer_size_in_bytes": if err := dec.Decode(&s.WriteBufferSizeInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "WriteBufferSizeInBytes", err) } } diff --git a/typedapi/types/certificateinformation.go b/typedapi/types/certificateinformation.go index f794ec0f63..f994723aee 100644 --- a/typedapi/types/certificateinformation.go +++ b/typedapi/types/certificateinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CertificateInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ssl/certificates/types.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ssl/certificates/types.ts#L22-L31 type CertificateInformation struct { Alias string `json:"alias,omitempty"` Expiry DateTime `json:"expiry"` @@ -60,7 +61,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { case "alias": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Alias", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,13 +72,13 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { case "expiry": if err := dec.Decode(&s.Expiry); err != nil { - return err + return fmt.Errorf("%s | %w", "Expiry", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,7 +94,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HasPrivateKey", err) } s.HasPrivateKey = value case bool: @@ -103,7 +104,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { case "issuer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Issuer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,7 +116,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,7 +128,7 @@ func (s 
*CertificateInformation) UnmarshalJSON(data []byte) error { case "serial_number": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SerialNumber", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -139,7 +140,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { case "subject_dn": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SubjectDn", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/cgroup.go b/typedapi/types/cgroup.go index 0aee74cf37..2cd0fb1033 100644 --- a/typedapi/types/cgroup.go +++ b/typedapi/types/cgroup.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Cgroup type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L461-L474 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L461-L474 type Cgroup struct { // Cpu Contains statistics about `cpu` control group for the node. Cpu *CgroupCpu `json:"cpu,omitempty"` diff --git a/typedapi/types/cgroupcpu.go b/typedapi/types/cgroupcpu.go index d7f9271348..0bf4d2fabf 100644 --- a/typedapi/types/cgroupcpu.go +++ b/typedapi/types/cgroupcpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CgroupCpu type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L487-L504 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L487-L504 type CgroupCpu struct { // CfsPeriodMicros The period of time, in microseconds, for how regularly all tasks in the same // cgroup as the Elasticsearch process should have their access to CPU resources @@ -69,7 +70,7 @@ func (s *CgroupCpu) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CfsPeriodMicros", err) } s.CfsPeriodMicros = &value case float64: @@ -85,7 +86,7 @@ func (s *CgroupCpu) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CfsQuotaMicros", err) } s.CfsQuotaMicros = &value case float64: @@ -96,7 +97,7 @@ func (s *CgroupCpu) UnmarshalJSON(data []byte) error { case "control_group": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ControlGroup", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *CgroupCpu) UnmarshalJSON(data []byte) error { case "stat": if err := dec.Decode(&s.Stat); err != nil { - return err + return fmt.Errorf("%s | %w", "Stat", err) } } diff --git a/typedapi/types/cgroupcpustat.go b/typedapi/types/cgroupcpustat.go index 22fd77e876..035ecb65be 100644 --- a/typedapi/types/cgroupcpustat.go +++ b/typedapi/types/cgroupcpustat.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CgroupCpuStat type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L506-L519 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L506-L519 type CgroupCpuStat struct { // NumberOfElapsedPeriods The number of reporting periods (as specified by `cfs_period_micros`) that // have elapsed. @@ -65,7 +66,7 @@ func (s *CgroupCpuStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfElapsedPeriods", err) } s.NumberOfElapsedPeriods = &value case float64: @@ -80,7 +81,7 @@ func (s *CgroupCpuStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfTimesThrottled", err) } s.NumberOfTimesThrottled = &value case float64: @@ -90,7 +91,7 @@ func (s *CgroupCpuStat) UnmarshalJSON(data []byte) error { case "time_throttled_nanos": if err := dec.Decode(&s.TimeThrottledNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeThrottledNanos", err) } } diff --git a/typedapi/types/cgroupmemory.go b/typedapi/types/cgroupmemory.go index f03d604cd9..6a1114d039 100644 --- a/typedapi/types/cgroupmemory.go +++ b/typedapi/types/cgroupmemory.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CgroupMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L521-L537 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L521-L537 type CgroupMemory struct { // ControlGroup The `memory` control group to which the Elasticsearch process belongs. ControlGroup *string `json:"control_group,omitempty"` @@ -66,7 +67,7 @@ func (s *CgroupMemory) UnmarshalJSON(data []byte) error { case "control_group": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ControlGroup", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *CgroupMemory) UnmarshalJSON(data []byte) error { case "limit_in_bytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LimitInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,7 +91,7 @@ func (s *CgroupMemory) UnmarshalJSON(data []byte) error { case "usage_in_bytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UsageInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/chaininput.go b/typedapi/types/chaininput.go index 0c952ae1f1..cf6b0b08eb 100644 --- a/typedapi/types/chaininput.go +++ b/typedapi/types/chaininput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ChainInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L35-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L35-L37 type ChainInput struct { Inputs []map[string]WatcherInput `json:"inputs"` } diff --git a/typedapi/types/charfilter.go b/typedapi/types/charfilter.go index d958c7d80b..6f37385d11 100644 --- a/typedapi/types/charfilter.go +++ b/typedapi/types/charfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // CharFilterDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/char_filters.ts#L28-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/char_filters.ts#L28-L30 type CharFilter interface{} diff --git a/typedapi/types/charfilterdefinition.go b/typedapi/types/charfilterdefinition.go index 18edf54504..34824b7333 100644 --- a/typedapi/types/charfilterdefinition.go +++ b/typedapi/types/charfilterdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -28,5 +28,5 @@ package types // IcuNormalizationCharFilter // KuromojiIterationMarkCharFilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/char_filters.ts#L32-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/char_filters.ts#L32-L41 type CharFilterDefinition interface{} diff --git a/typedapi/types/charfilterdetail.go b/typedapi/types/charfilterdetail.go index 0299b0b575..231806819e 100644 --- a/typedapi/types/charfilterdetail.go +++ b/typedapi/types/charfilterdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CharFilterDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L46-L49 type CharFilterDetail struct { FilteredText []string `json:"filtered_text"` Name string `json:"name"` @@ -53,13 +54,13 @@ func (s *CharFilterDetail) UnmarshalJSON(data []byte) error { case "filtered_text": if err := dec.Decode(&s.FilteredText); err != nil { - return err + return fmt.Errorf("%s | %w", "FilteredText", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/charfiltertypes.go b/typedapi/types/charfiltertypes.go index 3007d66515..483ad01285 100644 --- a/typedapi/types/charfiltertypes.go +++ b/typedapi/types/charfiltertypes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // CharFilterTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L228-L261 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L228-L261 type CharFilterTypes struct { // AnalyzerTypes Contains statistics about analyzer types used in selected nodes. AnalyzerTypes []FieldTypes `json:"analyzer_types"` diff --git a/typedapi/types/chargrouptokenizer.go b/typedapi/types/chargrouptokenizer.go index 196bb58ede..4aec7ffb6f 100644 --- a/typedapi/types/chargrouptokenizer.go +++ b/typedapi/types/chargrouptokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CharGroupTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L56-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L56-L60 type CharGroupTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` TokenizeOnChars []string `json:"tokenize_on_chars"` @@ -61,7 +62,7 @@ func (s *CharGroupTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenLength", err) } s.MaxTokenLength = &value case float64: @@ -71,17 +72,17 @@ func (s *CharGroupTokenizer) UnmarshalJSON(data []byte) error { case "tokenize_on_chars": if err := dec.Decode(&s.TokenizeOnChars); err != nil { - return err + return fmt.Errorf("%s | %w", "TokenizeOnChars", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/checkpointing.go b/typedapi/types/checkpointing.go index a216000736..621c981e3c 100644 --- a/typedapi/types/checkpointing.go +++ b/typedapi/types/checkpointing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Checkpointing type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/types.ts#L85-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/types.ts#L85-L92 type Checkpointing struct { ChangesLastDetectedAt *int64 `json:"changes_last_detected_at,omitempty"` ChangesLastDetectedAtDateTime DateTime `json:"changes_last_detected_at_date_time,omitempty"` @@ -62,7 +63,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ChangesLastDetectedAt", err) } s.ChangesLastDetectedAt = &value case float64: @@ -72,12 +73,12 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { case "changes_last_detected_at_date_time": if err := dec.Decode(&s.ChangesLastDetectedAtDateTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ChangesLastDetectedAtDateTime", err) } case "last": if err := dec.Decode(&s.Last); err != nil { - return err + return fmt.Errorf("%s | %w", "Last", err) } case "last_search_time": @@ -87,7 +88,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LastSearchTime", err) } s.LastSearchTime = &value case float64: @@ -97,7 +98,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { case "next": if err := dec.Decode(&s.Next); err != nil { - return err + return fmt.Errorf("%s | %w", "Next", err) } case "operations_behind": @@ -107,7 +108,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OperationsBehind", err) } s.OperationsBehind = &value case float64: diff --git a/typedapi/types/checkpointstats.go b/typedapi/types/checkpointstats.go index 0da573989b..7bb92fbaa1 100644 --- a/typedapi/types/checkpointstats.go +++ b/typedapi/types/checkpointstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CheckpointStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/types.ts#L76-L83 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/types.ts#L76-L83 type CheckpointStats struct { Checkpoint int64 `json:"checkpoint"` CheckpointProgress *TransformProgress `json:"checkpoint_progress,omitempty"` @@ -62,7 +63,7 @@ func (s *CheckpointStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Checkpoint", err) } s.Checkpoint = value case float64: @@ -72,27 +73,27 @@ func (s *CheckpointStats) UnmarshalJSON(data []byte) error { case "checkpoint_progress": if err := dec.Decode(&s.CheckpointProgress); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckpointProgress", err) } case "time_upper_bound": if err := dec.Decode(&s.TimeUpperBound); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeUpperBound", err) } case "time_upper_bound_millis": if err := dec.Decode(&s.TimeUpperBoundMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeUpperBoundMillis", err) } case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "timestamp_millis": if err := dec.Decode(&s.TimestampMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampMillis", err) } } diff --git a/typedapi/types/childrenaggregate.go b/typedapi/types/childrenaggregate.go index 7ee2caf534..38a71e63c3 100644 --- a/typedapi/types/childrenaggregate.go +++ b/typedapi/types/childrenaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // ChildrenAggregate type. 
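ChildrenAggregate below, like the other aggregate containers, keeps its sub-aggregations in a map[string]Aggregate. The keys appear to follow Elasticsearch's typed_keys convention, "type#name" (for example "sterms#my_terms"): the long switch that follows dispatches on the type prefix and stores the decoded aggregate under elems[1], the user-given name, and with this diff every branch now reports "Aggregations" as the failing field. A minimal sketch of the key split, assuming that convention:

package sketch

import (
	"fmt"
	"strings"
)

// splitTypedKey separates a typed_keys response key such as
// "sterms#my_terms" into the aggregate type ("sterms", which selects the
// concrete Go type) and the user-given aggregation name ("my_terms", the
// elems[1] used as the map key in the generated switch).
func splitTypedKey(key string) (aggType, name string, err error) {
	elems := strings.SplitN(key, "#", 2)
	if len(elems) != 2 {
		return "", "", fmt.Errorf("%s | unexpected aggregation key %q", "Aggregations", key)
	}
	return elems[0], elems[1], nil
}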
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L776-L777 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L776-L777 type ChildrenAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = 
o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/childrenaggregation.go b/typedapi/types/childrenaggregation.go index de2e615764..a29a31c4e2 100644 --- a/typedapi/types/childrenaggregation.go +++ b/typedapi/types/childrenaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ChildrenAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L111-L116 type ChildrenAggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -55,13 +56,13 @@ func (s *ChildrenAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *ChildrenAggregation) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/chisquareheuristic.go b/typedapi/types/chisquareheuristic.go index 79f7e56c8a..447f1ff80d 100644 --- a/typedapi/types/chisquareheuristic.go +++ b/typedapi/types/chisquareheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ChiSquareHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L735-L744 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L735-L744 type ChiSquareHeuristic struct { // BackgroundIsSuperset Set to `false` if you defined a custom background filter that represents a // different set of documents that you want to compare to. 
@@ -62,7 +63,7 @@ func (s *ChiSquareHeuristic) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundIsSuperset", err) } s.BackgroundIsSuperset = value case bool: @@ -76,7 +77,7 @@ func (s *ChiSquareHeuristic) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeNegatives", err) } s.IncludeNegatives = value case bool: diff --git a/typedapi/types/chunkingconfig.go b/typedapi/types/chunkingconfig.go index b8aaa88fd0..b101b3eeaf 100644 --- a/typedapi/types/chunkingconfig.go +++ b/typedapi/types/chunkingconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/chunkingmode" @@ -31,7 +32,7 @@ import ( // ChunkingConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L239-L252 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L239-L252 type ChunkingConfig struct { // Mode If the mode is `auto`, the chunk size is dynamically calculated; // this is the recommended value when the datafeed does not use aggregations. @@ -62,12 +63,12 @@ func (s *ChunkingConfig) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "time_span": if err := dec.Decode(&s.TimeSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSpan", err) } } diff --git a/typedapi/types/circleprocessor.go b/typedapi/types/circleprocessor.go index 89a09d2690..8cc0534631 100644 --- a/typedapi/types/circleprocessor.go +++ b/typedapi/types/circleprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CircleProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L410-L433 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L410-L433 type CircleProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -82,7 +83,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +99,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ErrorDistance", err) } f := Float64(value) s.ErrorDistance = f @@ -109,13 +110,13 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -145,7 +146,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -154,18 +155,18 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "shape_type": if err := dec.Decode(&s.ShapeType); err != nil { - return err + return fmt.Errorf("%s | %w", "ShapeType", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -176,7 +177,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/classificationinferenceoptions.go b/typedapi/types/classificationinferenceoptions.go index 8fd1eaefad..082811ffbe 100644 --- a/typedapi/types/classificationinferenceoptions.go +++ b/typedapi/types/classificationinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L93-L108 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L93-L108 type ClassificationInferenceOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. 
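Reviewer note: fields such as `CircleProcessor.ErrorDistance` accept either a JSON number or a quoted numeric string; the generated switch tries `strconv.ParseFloat` on the string case and takes the `float64` case directly, and the parse failure is now wrapped with the field name. A small hand-written sketch of that lenient parse (`parseLenientFloat` is a hypothetical helper, not part of the library):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// parseLenientFloat accepts a JSON number or a quoted numeric string and wraps
// any failure with the target field name, mirroring the generated pattern.
func parseLenientFloat(raw json.RawMessage, field string) (float64, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return 0, fmt.Errorf("%s | %w", field, err)
	}
	switch t := v.(type) {
	case string:
		f, err := strconv.ParseFloat(t, 64)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", field, err)
		}
		return f, nil
	case float64:
		return t, nil
	default:
		return 0, fmt.Errorf("%s | unexpected JSON type %T", field, v)
	}
}

func main() {
	for _, in := range []string{`1.5`, `"2.5"`, `"oops"`} {
		f, err := parseLenientFloat(json.RawMessage(in), "ErrorDistance")
		fmt.Println(f, err)
	}
}
```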
NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -71,7 +72,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -87,7 +88,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -98,7 +99,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "prediction_field_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionFieldType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -110,7 +111,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -122,7 +123,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "top_classes_results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TopClassesResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/cleanuprepositoryresults.go b/typedapi/types/cleanuprepositoryresults.go index ac5e15fa5e..77543b8e43 100644 --- a/typedapi/types/cleanuprepositoryresults.go +++ b/typedapi/types/cleanuprepositoryresults.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CleanupRepositoryResults type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L29-L34 type CleanupRepositoryResults struct { // DeletedBlobs Number of binary large objects (blobs) removed during cleanup. DeletedBlobs int64 `json:"deleted_blobs"` @@ -60,7 +61,7 @@ func (s *CleanupRepositoryResults) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeletedBlobs", err) } s.DeletedBlobs = value case float64: @@ -75,7 +76,7 @@ func (s *CleanupRepositoryResults) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeletedBytes", err) } s.DeletedBytes = value case float64: diff --git a/typedapi/types/client.go b/typedapi/types/client.go index 79ec290687..f11ea6c5c6 100644 --- a/typedapi/types/client.go +++ b/typedapi/types/client.go @@ -16,7 +16,7 @@ // under the License. 
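Reviewer note: because the new messages wrap the cause with `%w` rather than replacing it, callers still reach the underlying error (for example the `strconv.NumError` behind a bad `deleted_blobs` value) while also seeing which field failed. A short illustration of how such a wrapped error behaves:

```go
package main

import (
	"errors"
	"fmt"
	"strconv"
)

func main() {
	// Simulate what a generated UnmarshalJSON now returns for a bad numeric
	// string: the field name joined to the cause with %w.
	_, cause := strconv.ParseInt("not-a-number", 10, 64)
	err := fmt.Errorf("%s | %w", "DeletedBlobs", cause)

	// The message carries the field name...
	fmt.Println(err)

	// ...and the original error is still reachable through the wrapper.
	var numErr *strconv.NumError
	if errors.As(err, &numErr) {
		fmt.Println("offending input:", numErr.Num)
	}
	fmt.Println(errors.Is(err, strconv.ErrSyntax)) // true
}
```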
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Client type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L649-L696 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L649-L696 type Client struct { // Agent Reported agent for the HTTP client. // If unavailable, this property is not included in the response. @@ -76,7 +77,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case "agent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Agent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ClosedTimeMillis", err) } s.ClosedTimeMillis = &value case float64: @@ -107,7 +108,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } s.Id = &value case float64: @@ -122,7 +123,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LastRequestTimeMillis", err) } s.LastRequestTimeMillis = &value case float64: @@ -133,7 +134,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case "last_uri": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LastUri", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case "local_address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LocalAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -161,7 +162,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OpenedTimeMillis", err) } s.OpenedTimeMillis = &value case float64: @@ -172,7 +173,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case "remote_address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,7 +189,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCount", err) } s.RequestCount = &value case float64: @@ -203,7 +204,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequestSizeBytes", err) } s.RequestSizeBytes = &value case float64: @@ -214,7 +215,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { case "x_opaque_id": var tmp json.RawMessage if err := dec.Decode(&tmp); 
err != nil { - return err + return fmt.Errorf("%s | %w", "XOpaqueId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/closeindexresult.go b/typedapi/types/closeindexresult.go index 6a870456b9..571ee7bb92 100644 --- a/typedapi/types/closeindexresult.go +++ b/typedapi/types/closeindexresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CloseIndexResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/close/CloseIndexResponse.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/close/CloseIndexResponse.ts#L32-L35 type CloseIndexResult struct { Closed bool `json:"closed"` Shards map[string]CloseShardResult `json:"shards,omitempty"` @@ -58,7 +59,7 @@ func (s *CloseIndexResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Closed", err) } s.Closed = value case bool: @@ -70,7 +71,7 @@ func (s *CloseIndexResult) UnmarshalJSON(data []byte) error { s.Shards = make(map[string]CloseShardResult, 0) } if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } } diff --git a/typedapi/types/closeshardresult.go b/typedapi/types/closeshardresult.go index aab5e46ab8..f798f0987e 100644 --- a/typedapi/types/closeshardresult.go +++ b/typedapi/types/closeshardresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // CloseShardResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/close/CloseIndexResponse.ts#L37-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/close/CloseIndexResponse.ts#L37-L39 type CloseShardResult struct { Failures []ShardFailure `json:"failures"` } diff --git a/typedapi/types/clusterappliedstats.go b/typedapi/types/clusterappliedstats.go index bbf585232c..4d0d12beb4 100644 --- a/typedapi/types/clusterappliedstats.go +++ b/typedapi/types/clusterappliedstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterAppliedStats type. 
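Reviewer note: map-valued fields such as `CloseIndexResult.Shards` (above) and `ClusterIngest.ProcessorStats` (further below) are allocated lazily right before decoding, so a successful unmarshal never leaves a nil map behind the key, and the decode failure now names the field. A minimal sketch of that pattern, with `closeShard` standing in for the generated `CloseShardResult`:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// closeShard is a hypothetical stand-in for the generated CloseShardResult.
type closeShard struct {
	Failures []string `json:"failures"`
}

// decodeShards allocates the destination map only when the key is present,
// then decodes into it and wraps any failure with the field name.
func decodeShards(dst *map[string]closeShard, raw json.RawMessage) error {
	if *dst == nil {
		*dst = make(map[string]closeShard, 0)
	}
	if err := json.Unmarshal(raw, dst); err != nil {
		return fmt.Errorf("%s | %w", "Shards", err)
	}
	return nil
}

func main() {
	var shards map[string]closeShard
	raw := json.RawMessage(`{"0":{"failures":[]}}`)
	if err := decodeShards(&shards, raw); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%+v\n", shards)
}
```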
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L221-L223 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L221-L223 type ClusterAppliedStats struct { Recordings []Recording `json:"recordings,omitempty"` } diff --git a/typedapi/types/clustercomponenttemplate.go b/typedapi/types/clustercomponenttemplate.go index 5c4fc5acf9..8c3a7a0f92 100644 --- a/typedapi/types/clustercomponenttemplate.go +++ b/typedapi/types/clustercomponenttemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ClusterComponentTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/_types/ComponentTemplate.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/_types/ComponentTemplate.ts#L30-L33 type ClusterComponentTemplate struct { ComponentTemplate ComponentTemplateNode `json:"component_template"` Name string `json:"name"` @@ -52,12 +53,12 @@ func (s *ClusterComponentTemplate) UnmarshalJSON(data []byte) error { case "component_template": if err := dec.Decode(&s.ComponentTemplate); err != nil { - return err + return fmt.Errorf("%s | %w", "ComponentTemplate", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/clusterdetails.go b/typedapi/types/clusterdetails.go index 154b9136aa..a73eeda3b6 100644 --- a/typedapi/types/clusterdetails.go +++ b/typedapi/types/clusterdetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ClusterDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L45-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L45-L52 type ClusterDetails struct { Failures []ShardFailure `json:"failures,omitempty"` Indices string `json:"indices"` @@ -59,13 +60,13 @@ func (s *ClusterDetails) UnmarshalJSON(data []byte) error { case "failures": if err := dec.Decode(&s.Failures); err != nil { - return err + return fmt.Errorf("%s | %w", "Failures", err) } case "indices": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,12 +77,12 @@ func (s *ClusterDetails) UnmarshalJSON(data []byte) error { case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "timed_out": @@ -91,7 +92,7 @@ func (s *ClusterDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -100,7 +101,7 @@ func (s *ClusterDetails) UnmarshalJSON(data []byte) error { case "took": if err := dec.Decode(&s.Took); err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } } diff --git a/typedapi/types/clusterfilesystem.go b/typedapi/types/clusterfilesystem.go index e1e599c8f1..8bafc89a44 100644 --- a/typedapi/types/clusterfilesystem.go +++ b/typedapi/types/clusterfilesystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterFileSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L34-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L34-L49 type ClusterFileSystem struct { // AvailableInBytes Total number of bytes available to JVM in file stores across all selected // nodes. 
@@ -67,7 +68,7 @@ func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvailableInBytes", err) } s.AvailableInBytes = value case float64: @@ -82,7 +83,7 @@ func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeInBytes", err) } s.FreeInBytes = value case float64: @@ -97,7 +98,7 @@ func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = value case float64: diff --git a/typedapi/types/clusterindexingpressure.go b/typedapi/types/clusterindexingpressure.go index bdff232f64..a0bad0a0c0 100644 --- a/typedapi/types/clusterindexingpressure.go +++ b/typedapi/types/clusterindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L570-L572 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L570-L572 type ClusterIndexingPressure struct { Memory ClusterPressureMemory `json:"memory"` } diff --git a/typedapi/types/clusterindices.go b/typedapi/types/clusterindices.go index 04c546c9f0..5dcf397881 100644 --- a/typedapi/types/clusterindices.go +++ b/typedapi/types/clusterindices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterIndices type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L74-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L74-L107 type ClusterIndices struct { // Analysis Contains statistics about analyzers and analyzer components used in selected // nodes. 
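Reviewer note: each of these generated `UnmarshalJSON` methods appears to walk the object with a `json.Decoder` and a switch on the key name (hence the `bytes`, `errors` and `io` imports), with each case now wrapped by its Go field name. The exact loop is not shown in these hunks, so the following is only a hand-written analogue under that assumption, showing how a per-field wrap surfaces:

```go
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// miniStats is a hand-written analogue of a generated stats type.
type miniStats struct {
	Count int64           `json:"count"`
	Docs  json.RawMessage `json:"docs"`
}

func (s *miniStats) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				return nil
			}
			return err
		}
		key, ok := t.(string)
		if !ok {
			continue // skip the '{' and '}' delimiters
		}
		switch key {
		case "count":
			if err := dec.Decode(&s.Count); err != nil {
				return fmt.Errorf("%s | %w", "Count", err)
			}
		case "docs":
			if err := dec.Decode(&s.Docs); err != nil {
				return fmt.Errorf("%s | %w", "Docs", err)
			}
		default:
			var skip json.RawMessage // consume values of unknown keys
			if err := dec.Decode(&skip); err != nil {
				return err
			}
		}
	}
}

func main() {
	var s miniStats
	err := json.Unmarshal([]byte(`{"count":"nope","docs":{}}`), &s)
	fmt.Println(err) // Count | json: cannot unmarshal string into Go value of type int64
}
```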
@@ -75,12 +76,12 @@ func (s *ClusterIndices) UnmarshalJSON(data []byte) error { case "analysis": if err := dec.Decode(&s.Analysis); err != nil { - return err + return fmt.Errorf("%s | %w", "Analysis", err) } case "completion": if err := dec.Decode(&s.Completion); err != nil { - return err + return fmt.Errorf("%s | %w", "Completion", err) } case "count": @@ -90,7 +91,7 @@ func (s *ClusterIndices) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -100,42 +101,42 @@ func (s *ClusterIndices) UnmarshalJSON(data []byte) error { case "docs": if err := dec.Decode(&s.Docs); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } case "fielddata": if err := dec.Decode(&s.Fielddata); err != nil { - return err + return fmt.Errorf("%s | %w", "Fielddata", err) } case "mappings": if err := dec.Decode(&s.Mappings); err != nil { - return err + return fmt.Errorf("%s | %w", "Mappings", err) } case "query_cache": if err := dec.Decode(&s.QueryCache); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCache", err) } case "segments": if err := dec.Decode(&s.Segments); err != nil { - return err + return fmt.Errorf("%s | %w", "Segments", err) } case "shards": if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "store": if err := dec.Decode(&s.Store); err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } case "versions": if err := dec.Decode(&s.Versions); err != nil { - return err + return fmt.Errorf("%s | %w", "Versions", err) } } diff --git a/typedapi/types/clusterindicesshards.go b/typedapi/types/clusterindicesshards.go index 5c6b0126f9..2ed57b536a 100644 --- a/typedapi/types/clusterindicesshards.go +++ b/typedapi/types/clusterindicesshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterIndicesShards type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L60-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L60-L72 type ClusterIndicesShards struct { // Index Contains statistics about shards assigned to selected nodes. 
Index *ClusterIndicesShardsIndex `json:"index,omitempty"` @@ -59,7 +60,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "primaries": @@ -69,7 +70,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Primaries", err) } f := Float64(value) s.Primaries = &f @@ -85,7 +86,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Replication", err) } f := Float64(value) s.Replication = &f @@ -101,7 +102,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } f := Float64(value) s.Total = &f diff --git a/typedapi/types/clusterindicesshardsindex.go b/typedapi/types/clusterindicesshardsindex.go index b8ef15c8ac..6663f1e859 100644 --- a/typedapi/types/clusterindicesshardsindex.go +++ b/typedapi/types/clusterindicesshardsindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterIndicesShardsIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L51-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L51-L58 type ClusterIndicesShardsIndex struct { // Primaries Contains statistics about the number of primary shards assigned to selected // nodes. diff --git a/typedapi/types/clusterinfo.go b/typedapi/types/clusterinfo.go index 44a8905e75..306fee522d 100644 --- a/typedapi/types/clusterinfo.go +++ b/typedapi/types/clusterinfo.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L48-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L48-L54 type ClusterInfo struct { Nodes map[string]NodeDiskUsage `json:"nodes"` ReservedSizes []ReservedSize `json:"reserved_sizes"` diff --git a/typedapi/types/clusterinfotargets.go b/typedapi/types/clusterinfotargets.go index 369c438672..b123ea620a 100644 --- a/typedapi/types/clusterinfotargets.go +++ b/typedapi/types/clusterinfotargets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // ClusterInfoTargets type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L386-L386 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L386-L386 type ClusterInfoTargets []clusterinfotarget.ClusterInfoTarget diff --git a/typedapi/types/clusteringest.go b/typedapi/types/clusteringest.go index 55c96bce91..3c361ff76a 100644 --- a/typedapi/types/clusteringest.go +++ b/typedapi/types/clusteringest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L270-L273 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L270-L273 type ClusterIngest struct { NumberOfPipelines int `json:"number_of_pipelines"` ProcessorStats map[string]ClusterProcessor `json:"processor_stats"` @@ -59,7 +60,7 @@ func (s *ClusterIngest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfPipelines", err) } s.NumberOfPipelines = value case float64: @@ -72,7 +73,7 @@ func (s *ClusterIngest) UnmarshalJSON(data []byte) error { s.ProcessorStats = make(map[string]ClusterProcessor, 0) } if err := dec.Decode(&s.ProcessorStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessorStats", err) } } diff --git a/typedapi/types/clusterjvm.go b/typedapi/types/clusterjvm.go index 48074c8681..80b062cbd7 100644 --- a/typedapi/types/clusterjvm.go +++ b/typedapi/types/clusterjvm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterJvm type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L275-L292 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L275-L292 type ClusterJvm struct { // MaxUptimeInMillis Uptime duration, in milliseconds, since JVM last started. 
MaxUptimeInMillis int64 `json:"max_uptime_in_millis"` @@ -59,12 +60,12 @@ func (s *ClusterJvm) UnmarshalJSON(data []byte) error { case "max_uptime_in_millis": if err := dec.Decode(&s.MaxUptimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxUptimeInMillis", err) } case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "threads": @@ -74,7 +75,7 @@ func (s *ClusterJvm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Threads", err) } s.Threads = value case float64: @@ -84,7 +85,7 @@ func (s *ClusterJvm) UnmarshalJSON(data []byte) error { case "versions": if err := dec.Decode(&s.Versions); err != nil { - return err + return fmt.Errorf("%s | %w", "Versions", err) } } diff --git a/typedapi/types/clusterjvmmemory.go b/typedapi/types/clusterjvmmemory.go index cdf9400b7d..8596f3c50d 100644 --- a/typedapi/types/clusterjvmmemory.go +++ b/typedapi/types/clusterjvmmemory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterJvmMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L294-L303 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L294-L303 type ClusterJvmMemory struct { // HeapMaxInBytes Maximum amount of memory, in bytes, available for use by the heap across all // selected nodes. @@ -61,7 +62,7 @@ func (s *ClusterJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMaxInBytes", err) } s.HeapMaxInBytes = value case float64: @@ -76,7 +77,7 @@ func (s *ClusterJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapUsedInBytes", err) } s.HeapUsedInBytes = value case float64: diff --git a/typedapi/types/clusterjvmversion.go b/typedapi/types/clusterjvmversion.go index ad72ef75e3..4367ff507d 100644 --- a/typedapi/types/clusterjvmversion.go +++ b/typedapi/types/clusterjvmversion.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterJvmVersion type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L305-L335 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L305-L335 type ClusterJvmVersion struct { // BundledJdk Always `true`. 
All distributions come with a bundled Java Development Kit // (JDK). @@ -73,7 +74,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BundledJdk", err) } s.BundledJdk = value case bool: @@ -88,7 +89,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -103,7 +104,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsingBundledJdk", err) } s.UsingBundledJdk = value case bool: @@ -112,13 +113,13 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "vm_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VmName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case "vm_vendor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VmVendor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -141,7 +142,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case "vm_version": if err := dec.Decode(&s.VmVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "VmVersion", err) } } diff --git a/typedapi/types/clusternetworktypes.go b/typedapi/types/clusternetworktypes.go index e1775b181d..1681b63f73 100644 --- a/typedapi/types/clusternetworktypes.go +++ b/typedapi/types/clusternetworktypes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterNetworkTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L337-L346 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L337-L346 type ClusterNetworkTypes struct { // HttpTypes Contains statistics about the HTTP network types used by selected nodes. HttpTypes map[string]int `json:"http_types"` diff --git a/typedapi/types/clusternode.go b/typedapi/types/clusternode.go index 4e0b09ccee..332ead608c 100644 --- a/typedapi/types/clusternode.go +++ b/typedapi/types/clusternode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ClusterNode type. 
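Reviewer note: free-form string fields such as `vm_name`, `vm_vendor`, `agent` and `x_opaque_id` are read into a `json.RawMessage` first and then unquoted with `strconv.Unquote` before assignment, with the decode failure wrapped by the field name. A small sketch of that helper shape (`optionalString` is hypothetical; falling back to the raw text when `Unquote` fails is an assumption, since that branch is not visible in these hunks):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)

// optionalString captures the raw token, wraps decode failures with the field
// name, and unquotes the JSON string literal. The fallback to the raw text on
// an Unquote failure is an assumption made for this sketch.
func optionalString(dec *json.Decoder, field string) (*string, error) {
	var tmp json.RawMessage
	if err := dec.Decode(&tmp); err != nil {
		return nil, fmt.Errorf("%s | %w", field, err)
	}
	o := string(tmp)
	if unquoted, err := strconv.Unquote(o); err == nil {
		o = unquoted
	}
	return &o, nil
}

func main() {
	dec := json.NewDecoder(strings.NewReader(`"OpenJDK 64-Bit Server VM"`))
	v, err := optionalString(dec, "VmName")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(*v)
}
```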
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/ClusterNode.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/ClusterNode.ts#L22-L24 type ClusterNode struct { Name string `json:"name"` } @@ -51,7 +52,7 @@ func (s *ClusterNode) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/clusternodecount.go b/typedapi/types/clusternodecount.go index a1873bae61..9f5a0f5349 100644 --- a/typedapi/types/clusternodecount.go +++ b/typedapi/types/clusternodecount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterNodeCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L348-L367 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L348-L367 type ClusterNodeCount struct { CoordinatingOnly int `json:"coordinating_only"` Data int `json:"data"` @@ -71,7 +72,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CoordinatingOnly", err) } s.CoordinatingOnly = value case float64: @@ -87,7 +88,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } s.Data = value case float64: @@ -103,7 +104,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataCold", err) } s.DataCold = value case float64: @@ -119,7 +120,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataContent", err) } s.DataContent = value case float64: @@ -135,7 +136,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrozen", err) } s.DataFrozen = &value case float64: @@ -151,7 +152,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataHot", err) } s.DataHot = value case float64: @@ -167,7 +168,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataWarm", err) } s.DataWarm = value case float64: @@ -183,7 +184,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Ingest", err) } s.Ingest = value case float64: @@ -199,7 +200,7 @@ func 
(s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Master", err) } s.Master = value case float64: @@ -215,7 +216,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Ml", err) } s.Ml = value case float64: @@ -231,7 +232,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteClusterClient", err) } s.RemoteClusterClient = value case float64: @@ -247,7 +248,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -263,7 +264,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Transform", err) } s.Transform = value case float64: @@ -279,7 +280,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "VotingOnly", err) } s.VotingOnly = value case float64: diff --git a/typedapi/types/clusternodes.go b/typedapi/types/clusternodes.go index 3df5881362..c77068b680 100644 --- a/typedapi/types/clusternodes.go +++ b/typedapi/types/clusternodes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterNodes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L369-L402 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L369-L402 type ClusterNodes struct { // Count Contains counts for nodes selected by the request’s node filters. Count ClusterNodeCount `json:"count"` diff --git a/typedapi/types/clusteroperatingsystem.go b/typedapi/types/clusteroperatingsystem.go index dd5d7c8b63..586a4cbd79 100644 --- a/typedapi/types/clusteroperatingsystem.go +++ b/typedapi/types/clusteroperatingsystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterOperatingSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L415-L442 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L415-L442 type ClusterOperatingSystem struct { // AllocatedProcessors Number of processors used to calculate thread pool size across all selected // nodes. 
@@ -74,7 +75,7 @@ func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllocatedProcessors", err) } s.AllocatedProcessors = value case float64: @@ -84,7 +85,7 @@ func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { case "architectures": if err := dec.Decode(&s.Architectures); err != nil { - return err + return fmt.Errorf("%s | %w", "Architectures", err) } case "available_processors": @@ -95,7 +96,7 @@ func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvailableProcessors", err) } s.AvailableProcessors = value case float64: @@ -105,17 +106,17 @@ func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "names": if err := dec.Decode(&s.Names); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } case "pretty_names": if err := dec.Decode(&s.PrettyNames); err != nil { - return err + return fmt.Errorf("%s | %w", "PrettyNames", err) } } diff --git a/typedapi/types/clusteroperatingsystemarchitecture.go b/typedapi/types/clusteroperatingsystemarchitecture.go index 3f8602ab60..54f6c77890 100644 --- a/typedapi/types/clusteroperatingsystemarchitecture.go +++ b/typedapi/types/clusteroperatingsystemarchitecture.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterOperatingSystemArchitecture type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L404-L413 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L404-L413 type ClusterOperatingSystemArchitecture struct { // Arch Name of an architecture used by one or more selected nodes. Arch string `json:"arch"` @@ -56,7 +57,7 @@ func (s *ClusterOperatingSystemArchitecture) UnmarshalJSON(data []byte) error { case "arch": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Arch", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *ClusterOperatingSystemArchitecture) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: diff --git a/typedapi/types/clusteroperatingsystemname.go b/typedapi/types/clusteroperatingsystemname.go index f7db458e0a..4fc50399c2 100644 --- a/typedapi/types/clusteroperatingsystemname.go +++ b/typedapi/types/clusteroperatingsystemname.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterOperatingSystemName type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L444-L453 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L444-L453 type ClusterOperatingSystemName struct { // Count Number of selected nodes using the operating system. Count int `json:"count"` @@ -61,7 +62,7 @@ func (s *ClusterOperatingSystemName) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -71,7 +72,7 @@ func (s *ClusterOperatingSystemName) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/clusteroperatingsystemprettyname.go b/typedapi/types/clusteroperatingsystemprettyname.go index 363344c04b..3d26f7fc3b 100644 --- a/typedapi/types/clusteroperatingsystemprettyname.go +++ b/typedapi/types/clusteroperatingsystemprettyname.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterOperatingSystemPrettyName type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L455-L464 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L455-L464 type ClusterOperatingSystemPrettyName struct { // Count Number of selected nodes using the operating system. Count int `json:"count"` @@ -62,7 +63,7 @@ func (s *ClusterOperatingSystemPrettyName) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -72,7 +73,7 @@ func (s *ClusterOperatingSystemPrettyName) UnmarshalJSON(data []byte) error { case "pretty_name": if err := dec.Decode(&s.PrettyName); err != nil { - return err + return fmt.Errorf("%s | %w", "PrettyName", err) } } diff --git a/typedapi/types/clusterpressurememory.go b/typedapi/types/clusterpressurememory.go index 3a04a0bbdf..588e82c226 100644 --- a/typedapi/types/clusterpressurememory.go +++ b/typedapi/types/clusterpressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterPressureMemory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L574-L578 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L574-L578 type ClusterPressureMemory struct { Current IndexingPressureMemorySummary `json:"current"` LimitInBytes int64 `json:"limit_in_bytes"` @@ -54,7 +55,7 @@ func (s *ClusterPressureMemory) UnmarshalJSON(data []byte) error { case "current": if err := dec.Decode(&s.Current); err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } case "limit_in_bytes": @@ -64,7 +65,7 @@ func (s *ClusterPressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LimitInBytes", err) } s.LimitInBytes = value case float64: @@ -74,7 +75,7 @@ func (s *ClusterPressureMemory) UnmarshalJSON(data []byte) error { case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } } diff --git a/typedapi/types/clusterprocess.go b/typedapi/types/clusterprocess.go index 5f1aba622a..5b6bea0d30 100644 --- a/typedapi/types/clusterprocess.go +++ b/typedapi/types/clusterprocess.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ClusterProcess type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L466-L475 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L466-L475 type ClusterProcess struct { // Cpu Contains statistics about CPU used by selected nodes. Cpu ClusterProcessCpu `json:"cpu"` diff --git a/typedapi/types/clusterprocesscpu.go b/typedapi/types/clusterprocesscpu.go index 58e3f73dbe..8c4140714c 100644 --- a/typedapi/types/clusterprocesscpu.go +++ b/typedapi/types/clusterprocesscpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterProcessCpu type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L477-L483 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L477-L483 type ClusterProcessCpu struct { // Percent Percentage of CPU used across all selected nodes. // Returns `-1` if not supported. 
@@ -60,7 +61,7 @@ func (s *ClusterProcessCpu) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Percent", err) } s.Percent = value case float64: diff --git a/typedapi/types/clusterprocessopenfiledescriptors.go b/typedapi/types/clusterprocessopenfiledescriptors.go index df86ee7d7d..70f4e4d8ee 100644 --- a/typedapi/types/clusterprocessopenfiledescriptors.go +++ b/typedapi/types/clusterprocessopenfiledescriptors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterProcessOpenFileDescriptors type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L485-L501 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L485-L501 type ClusterProcessOpenFileDescriptors struct { // Avg Average number of concurrently open file descriptors. // Returns `-1` if not supported. @@ -67,7 +68,7 @@ func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } s.Avg = value case float64: @@ -82,7 +83,7 @@ func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } s.Max = value case float64: @@ -97,7 +98,7 @@ func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } s.Min = value case float64: diff --git a/typedapi/types/clusterprocessor.go b/typedapi/types/clusterprocessor.go index d760388dcb..1c1f66b8d5 100644 --- a/typedapi/types/clusterprocessor.go +++ b/typedapi/types/clusterprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L503-L509 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L503-L509 type ClusterProcessor struct { Count int64 `json:"count"` Current int64 `json:"current"` @@ -61,7 +62,7 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -76,7 +77,7 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } s.Current = value case float64: @@ -91,7 +92,7 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = value case float64: @@ -101,12 +102,12 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { case "time": if err := dec.Decode(&s.Time); err != nil { - return err + return fmt.Errorf("%s | %w", "Time", err) } case "time_in_millis": if err := dec.Decode(&s.TimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInMillis", err) } } diff --git a/typedapi/types/clusterremoteinfo.go b/typedapi/types/clusterremoteinfo.go index 309849a988..11f54eb8c6 100644 --- a/typedapi/types/clusterremoteinfo.go +++ b/typedapi/types/clusterremoteinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // ClusterRemoteSniffInfo // ClusterRemoteProxyInfo // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L28-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L28-L29 type ClusterRemoteInfo interface{} diff --git a/typedapi/types/clusterremoteproxyinfo.go b/typedapi/types/clusterremoteproxyinfo.go index 93461496d2..3f47297225 100644 --- a/typedapi/types/clusterremoteproxyinfo.go +++ b/typedapi/types/clusterremoteproxyinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterRemoteProxyInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L41-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L41-L50 type ClusterRemoteProxyInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` @@ -64,7 +65,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Connected", err) } s.Connected = value case bool: @@ -73,7 +74,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case "initial_connect_timeout": if err := dec.Decode(&s.InitialConnectTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "InitialConnectTimeout", err) } case "max_proxy_socket_connections": @@ -84,7 +85,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxProxySocketConnections", err) } s.MaxProxySocketConnections = value case float64: @@ -94,7 +95,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "num_proxy_sockets_connected": @@ -105,7 +106,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumProxySocketsConnected", err) } s.NumProxySocketsConnected = value case float64: @@ -116,7 +117,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case "proxy_address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ProxyAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case "server_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ServerName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -144,7 +145,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SkipUnavailable", err) } s.SkipUnavailable = value case bool: diff --git a/typedapi/types/clusterremotesniffinfo.go b/typedapi/types/clusterremotesniffinfo.go index beb035ddaf..2106131d74 100644 --- a/typedapi/types/clusterremotesniffinfo.go +++ b/typedapi/types/clusterremotesniffinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterRemoteSniffInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L31-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L31-L39 type ClusterRemoteSniffInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` @@ -63,7 +64,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Connected", err) } s.Connected = value case bool: @@ -72,7 +73,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case "initial_connect_timeout": if err := dec.Decode(&s.InitialConnectTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "InitialConnectTimeout", err) } case "max_connections_per_cluster": @@ -83,7 +84,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxConnectionsPerCluster", err) } s.MaxConnectionsPerCluster = value case float64: @@ -93,7 +94,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "num_nodes_connected": @@ -103,7 +104,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumNodesConnected", err) } s.NumNodesConnected = value case float64: @@ -113,7 +114,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case "seeds": if err := dec.Decode(&s.Seeds); err != nil { - return err + return fmt.Errorf("%s | %w", "Seeds", err) } case "skip_unavailable": @@ -123,7 +124,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SkipUnavailable", err) } s.SkipUnavailable = value case bool: diff --git a/typedapi/types/clusterruntimefieldtypes.go b/typedapi/types/clusterruntimefieldtypes.go index 2862da0ea7..2f5f6dbf0f 100644 --- a/typedapi/types/clusterruntimefieldtypes.go +++ b/typedapi/types/clusterruntimefieldtypes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterRuntimeFieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L169-L226 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L169-L226 type ClusterRuntimeFieldTypes struct { // CharsMax Maximum number of characters for a single runtime field script. 
CharsMax int `json:"chars_max"` @@ -90,7 +91,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CharsMax", err) } s.CharsMax = value case float64: @@ -106,7 +107,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CharsTotal", err) } s.CharsTotal = value case float64: @@ -122,7 +123,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -138,7 +139,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocMax", err) } s.DocMax = value case float64: @@ -154,7 +155,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocTotal", err) } s.DocTotal = value case float64: @@ -170,7 +171,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCount", err) } s.IndexCount = value case float64: @@ -180,7 +181,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "lang": if err := dec.Decode(&s.Lang); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } case "lines_max": @@ -191,7 +192,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LinesMax", err) } s.LinesMax = value case float64: @@ -207,7 +208,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LinesTotal", err) } s.LinesTotal = value case float64: @@ -217,7 +218,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "scriptless_count": @@ -228,7 +229,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptlessCount", err) } s.ScriptlessCount = value case float64: @@ -244,7 +245,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShadowedCount", err) } s.ShadowedCount = value case float64: @@ -260,7 +261,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SourceMax", err) } s.SourceMax = value case float64: @@ -276,7 +277,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SourceTotal", err) } s.SourceTotal = value case float64: diff --git a/typedapi/types/clustershardmetrics.go b/typedapi/types/clustershardmetrics.go 
index daccc3274e..e59f565ae3 100644 --- a/typedapi/types/clustershardmetrics.go +++ b/typedapi/types/clustershardmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterShardMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L511-L524 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L511-L524 type ClusterShardMetrics struct { // Avg Mean number of shards in an index, counting only shards assigned to selected // nodes. @@ -65,7 +66,7 @@ func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } f := Float64(value) s.Avg = f @@ -81,7 +82,7 @@ func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) s.Max = f @@ -97,7 +98,7 @@ func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) s.Min = f diff --git a/typedapi/types/clusterstatequeue.go b/typedapi/types/clusterstatequeue.go index ac5968e702..a651e40645 100644 --- a/typedapi/types/clusterstatequeue.go +++ b/typedapi/types/clusterstatequeue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterStateQueue type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L248-L261 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L248-L261 type ClusterStateQueue struct { // Committed Number of committed cluster states in queue. 
Committed *int64 `json:"committed,omitempty"` @@ -62,7 +63,7 @@ func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Committed", err) } s.Committed = &value case float64: @@ -77,7 +78,7 @@ func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Pending", err) } s.Pending = &value case float64: @@ -92,7 +93,7 @@ func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = &value case float64: diff --git a/typedapi/types/clusterstateupdate.go b/typedapi/types/clusterstateupdate.go index 5f71328016..ab441e774c 100644 --- a/typedapi/types/clusterstateupdate.go +++ b/typedapi/types/clusterstateupdate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterStateUpdate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L278-L343 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L278-L343 type ClusterStateUpdate struct { // CommitTime The cumulative amount of time spent waiting for a successful cluster state // update to commit, which measures the time from the start of each publication @@ -121,42 +122,42 @@ func (s *ClusterStateUpdate) UnmarshalJSON(data []byte) error { case "commit_time": if err := dec.Decode(&s.CommitTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CommitTime", err) } case "commit_time_millis": if err := dec.Decode(&s.CommitTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "CommitTimeMillis", err) } case "completion_time": if err := dec.Decode(&s.CompletionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CompletionTime", err) } case "completion_time_millis": if err := dec.Decode(&s.CompletionTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "CompletionTimeMillis", err) } case "computation_time": if err := dec.Decode(&s.ComputationTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ComputationTime", err) } case "computation_time_millis": if err := dec.Decode(&s.ComputationTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ComputationTimeMillis", err) } case "context_construction_time": if err := dec.Decode(&s.ContextConstructionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ContextConstructionTime", err) } case "context_construction_time_millis": if err := dec.Decode(&s.ContextConstructionTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ContextConstructionTimeMillis", err) } case "count": @@ -166,7 +167,7 @@ func (s *ClusterStateUpdate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) 
} s.Count = value case float64: @@ -176,32 +177,32 @@ func (s *ClusterStateUpdate) UnmarshalJSON(data []byte) error { case "master_apply_time": if err := dec.Decode(&s.MasterApplyTime); err != nil { - return err + return fmt.Errorf("%s | %w", "MasterApplyTime", err) } case "master_apply_time_millis": if err := dec.Decode(&s.MasterApplyTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "MasterApplyTimeMillis", err) } case "notification_time": if err := dec.Decode(&s.NotificationTime); err != nil { - return err + return fmt.Errorf("%s | %w", "NotificationTime", err) } case "notification_time_millis": if err := dec.Decode(&s.NotificationTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "NotificationTimeMillis", err) } case "publication_time": if err := dec.Decode(&s.PublicationTime); err != nil { - return err + return fmt.Errorf("%s | %w", "PublicationTime", err) } case "publication_time_millis": if err := dec.Decode(&s.PublicationTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "PublicationTimeMillis", err) } } diff --git a/typedapi/types/clusterstatistics.go b/typedapi/types/clusterstatistics.go index bc434cde90..e54c976b4e 100644 --- a/typedapi/types/clusterstatistics.go +++ b/typedapi/types/clusterstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ClusterStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L27-L35 type ClusterStatistics struct { Details map[string]ClusterDetails `json:"details,omitempty"` Failed int `json:"failed"` @@ -61,7 +62,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { s.Details = make(map[string]ClusterDetails, 0) } if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "failed": @@ -72,7 +73,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = value case float64: @@ -88,7 +89,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Partial", err) } s.Partial = value case float64: @@ -104,7 +105,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Running", err) } s.Running = value case float64: @@ -120,7 +121,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Skipped", err) } s.Skipped = value case float64: @@ -136,7 +137,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Successful", err) } 
s.Successful = value case float64: @@ -152,7 +153,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/collector.go b/typedapi/types/collector.go index ed66493ba3..ccf567486e 100644 --- a/typedapi/types/collector.go +++ b/typedapi/types/collector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Collector type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L86-L91 type Collector struct { Children []Collector `json:"children,omitempty"` Name string `json:"name"` @@ -55,13 +56,13 @@ func (s *Collector) UnmarshalJSON(data []byte) error { case "children": if err := dec.Decode(&s.Children); err != nil { - return err + return fmt.Errorf("%s | %w", "Children", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *Collector) UnmarshalJSON(data []byte) error { case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *Collector) UnmarshalJSON(data []byte) error { case "time_in_nanos": if err := dec.Decode(&s.TimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInNanos", err) } } diff --git a/typedapi/types/column.go b/typedapi/types/column.go index a0f5c95b38..c8fe722374 100644 --- a/typedapi/types/column.go +++ b/typedapi/types/column.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Column type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/sql/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/sql/types.ts#L23-L26 type Column struct { Name string `json:"name"` Type string `json:"type"` @@ -53,13 +54,13 @@ func (s *Column) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/combinedfieldsquery.go b/typedapi/types/combinedfieldsquery.go index 4ed09fb3da..abb93189ff 100644 --- a/typedapi/types/combinedfieldsquery.go +++ b/typedapi/types/combinedfieldsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // CombinedFieldsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L445-L479 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L445-L479 type CombinedFieldsQuery struct { // AutoGenerateSynonymsPhraseQuery If true, match phrase queries are automatically created for multi-term // synonyms. 
@@ -83,7 +84,7 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AutoGenerateSynonymsPhraseQuery", err) } s.AutoGenerateSynonymsPhraseQuery = &value case bool: @@ -97,7 +98,7 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -108,23 +109,23 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "operator": if err := dec.Decode(&s.Operator); err != nil { - return err + return fmt.Errorf("%s | %w", "Operator", err) } case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -136,7 +137,7 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -147,7 +148,7 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { case "zero_terms_query": if err := dec.Decode(&s.ZeroTermsQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "ZeroTermsQuery", err) } } diff --git a/typedapi/types/command.go b/typedapi/types/command.go index af85044d0b..2446dd5b8b 100644 --- a/typedapi/types/command.go +++ b/typedapi/types/command.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Command type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L22-L43 type Command struct { // AllocateEmptyPrimary Allocate an empty primary shard to a node. Accepts the index and shard for // index name and shard number, and node to allocate the shard to. Using this diff --git a/typedapi/types/commandallocateprimaryaction.go b/typedapi/types/commandallocateprimaryaction.go index 9fcbff10b6..89fe2dfd27 100644 --- a/typedapi/types/commandallocateprimaryaction.go +++ b/typedapi/types/commandallocateprimaryaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CommandAllocatePrimaryAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L78-L84 type CommandAllocatePrimaryAction struct { // AcceptDataLoss If a node which has a copy of the data rejoins the cluster later on, that // data will be deleted. To ensure that these implications are well-understood, @@ -63,7 +64,7 @@ func (s *CommandAllocatePrimaryAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AcceptDataLoss", err) } s.AcceptDataLoss = value case bool: @@ -72,13 +73,13 @@ func (s *CommandAllocatePrimaryAction) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *CommandAllocatePrimaryAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: diff --git a/typedapi/types/commandallocatereplicaaction.go b/typedapi/types/commandallocatereplicaaction.go index 5c879dd169..eeedde6673 100644 --- a/typedapi/types/commandallocatereplicaaction.go +++ b/typedapi/types/commandallocatereplicaaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CommandAllocateReplicaAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L69-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L69-L76 type CommandAllocateReplicaAction struct { Index string `json:"index"` Node string `json:"node"` @@ -54,13 +55,13 @@ func (s *CommandAllocateReplicaAction) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *CommandAllocateReplicaAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: diff --git a/typedapi/types/commandcancelaction.go b/typedapi/types/commandcancelaction.go index 624407f7f4..ec90e15be6 100644 --- a/typedapi/types/commandcancelaction.go +++ b/typedapi/types/commandcancelaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CommandCancelAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L45-L50 type CommandCancelAction struct { AllowPrimary *bool `json:"allow_primary,omitempty"` Index string `json:"index"` @@ -60,7 +61,7 @@ func (s *CommandCancelAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowPrimary", err) } s.AllowPrimary = &value case bool: @@ -69,13 +70,13 @@ func (s *CommandCancelAction) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *CommandCancelAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: diff --git a/typedapi/types/commandmoveaction.go b/typedapi/types/commandmoveaction.go index 75ff930d70..ac828c8215 100644 --- a/typedapi/types/commandmoveaction.go +++ b/typedapi/types/commandmoveaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CommandMoveAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L60-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L60-L67 type CommandMoveAction struct { // FromNode The node to move the shard from FromNode string `json:"from_node"` @@ -58,7 +59,7 @@ func (s *CommandMoveAction) UnmarshalJSON(data []byte) error { case "from_node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FromNode", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,7 +70,7 @@ func (s *CommandMoveAction) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "shard": @@ -80,7 +81,7 @@ func (s *CommandMoveAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: @@ -91,7 +92,7 @@ func (s *CommandMoveAction) UnmarshalJSON(data []byte) error { case "to_node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ToNode", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/commongramstokenfilter.go b/typedapi/types/commongramstokenfilter.go index b05020859e..344b1bc397 100644 --- a/typedapi/types/commongramstokenfilter.go +++ b/typedapi/types/commongramstokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CommonGramsTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L173-L179 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L173-L179 type CommonGramsTokenFilter struct { CommonWords []string `json:"common_words,omitempty"` CommonWordsPath *string `json:"common_words_path,omitempty"` @@ -57,13 +58,13 @@ func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { case "common_words": if err := dec.Decode(&s.CommonWords); err != nil { - return err + return fmt.Errorf("%s | %w", "CommonWords", err) } case "common_words_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CommonWordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreCase", err) } s.IgnoreCase = &value case bool: @@ -93,7 +94,7 @@ func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueryMode", err) } s.QueryMode = &value case bool: @@ -102,12 +103,12 @@ func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/commontermsquery.go b/typedapi/types/commontermsquery.go index 0982bbeb82..e87ae03c5d 100644 --- a/typedapi/types/commontermsquery.go +++ b/typedapi/types/commontermsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CommonTermsQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L34-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L34-L44 type CommonTermsQuery struct { Analyzer *string `json:"analyzer,omitempty"` // Boost Floating point number used to decrease or increase the relevance scores of @@ -76,7 +77,7 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -108,7 +109,7 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CutoffFrequency", err) } f := Float64(value) s.CutoffFrequency = &f @@ -119,23 +120,23 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { case "high_freq_operator": if err := dec.Decode(&s.HighFreqOperator); err != nil { - return err + return fmt.Errorf("%s | %w", "HighFreqOperator", err) } case "low_freq_operator": if err := dec.Decode(&s.LowFreqOperator); err != nil { - return err + return fmt.Errorf("%s | %w", "LowFreqOperator", err) } case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -147,7 +148,7 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/compactnodeinfo.go b/typedapi/types/compactnodeinfo.go index 31a59f9ccc..96f4237414 100644 --- a/typedapi/types/compactnodeinfo.go +++ b/typedapi/types/compactnodeinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // CompactNodeInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L27-L29 type CompactNodeInfo struct { Name string `json:"name"` } @@ -51,7 +52,7 @@ func (s *CompactNodeInfo) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/completioncontext.go b/typedapi/types/completioncontext.go index f79517cbde..c14b3abf07 100644 --- a/typedapi/types/completioncontext.go +++ b/typedapi/types/completioncontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CompletionContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L232-L261 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L232-L261 type CompletionContext struct { // Boost The factor by which the score of the suggestion should be boosted. // The score is computed by multiplying the boost with the suggestion weight. @@ -79,7 +80,7 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -104,18 +105,18 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.Context); err != nil { - return err + return fmt.Errorf("%s | %w", "Context", err) } } case "neighbours": if err := dec.Decode(&s.Neighbours); err != nil { - return err + return fmt.Errorf("%s | %w", "Neighbours", err) } case "precision": if err := dec.Decode(&s.Precision); err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } case "prefix": @@ -125,7 +126,7 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Prefix", err) } s.Prefix = &value case bool: diff --git a/typedapi/types/completionproperty.go b/typedapi/types/completionproperty.go index f4494932c3..42c1db051f 100644 --- a/typedapi/types/completionproperty.go +++ b/typedapi/types/completionproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CompletionProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L27-L35 type CompletionProperty struct { Analyzer *string `json:"analyzer,omitempty"` Contexts []SuggestContext `json:"contexts,omitempty"` @@ -71,7 +72,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "contexts": if err := dec.Decode(&s.Contexts); err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } case "copy_to": @@ -91,13 +92,13 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -108,7 +109,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -117,7 +118,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -435,7 +436,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -451,7 +452,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxInputLength", err) } s.MaxInputLength = &value case float64: @@ -464,7 +465,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "preserve_position_increments": @@ -474,7 +475,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PreservePositionIncrements", err) } s.PreservePositionIncrements = &value case bool: @@ -488,7 +489,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveSeparators", err) } s.PreserveSeparators = &value case bool: @@ -805,7 +806,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "search_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -817,7 +818,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) 
error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -833,7 +834,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -842,7 +843,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/completionstats.go b/typedapi/types/completionstats.go index 0f0e6b4dd1..ae5ff540a1 100644 --- a/typedapi/types/completionstats.go +++ b/typedapi/types/completionstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CompletionStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L80-L90 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L80-L90 type CompletionStats struct { Fields map[string]FieldSizeUsage `json:"fields,omitempty"` // Size Total amount of memory used for completion across all shards assigned to @@ -61,12 +62,12 @@ func (s *CompletionStats) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]FieldSizeUsage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "size_in_bytes": @@ -76,7 +77,7 @@ func (s *CompletionStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: diff --git a/typedapi/types/completionsuggest.go b/typedapi/types/completionsuggest.go index 4582bcb53a..39508d29ea 100644 --- a/typedapi/types/completionsuggest.go +++ b/typedapi/types/completionsuggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CompletionSuggest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L48-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L48-L55 type CompletionSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -61,7 +62,7 @@ func (s *CompletionSuggest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = value case float64: @@ -77,7 +78,7 @@ func (s *CompletionSuggest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } s.Offset = value case float64: @@ -91,20 +92,20 @@ func (s *CompletionSuggest) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewCompletionSuggestOption() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } s.Options = append(s.Options, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/completionsuggester.go b/typedapi/types/completionsuggester.go index bb573fc87d..2ee5985532 100644 --- a/typedapi/types/completionsuggester.go +++ b/typedapi/types/completionsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CompletionSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L160-L178 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L160-L178 type CompletionSuggester struct { // Analyzer The analyzer to analyze the suggest text with. // Defaults to the search analyzer of the suggest field. 
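The change applied throughout these hunks is uniform: every bare return err inside a generated UnmarshalJSON is replaced by return fmt.Errorf("%s | %w", "<FieldName>", err), so a decode failure now names the offending struct field while the original error stays reachable through the %w wrap. The following standalone sketch shows what that gives a caller, using the CompletionSuggest "length" case from the hunk just above; it is not part of the patch, and the import path and input JSON are assumptions made for illustration.

package main

import (
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed module path for this repository
)

func main() {
	var s types.CompletionSuggest

	// "length" is numeric; a non-numeric string makes strconv.Atoi fail inside UnmarshalJSON.
	// The input JSON here is made up for illustration.
	err := s.UnmarshalJSON([]byte(`{"length":"oops"}`))

	// With this patch the message names the field, e.g.
	//   Length | strconv.Atoi: parsing "oops": invalid syntax
	fmt.Println(err)

	// Because the original error is wrapped with %w, it can still be unwrapped.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr))           // true
	fmt.Println(errors.Is(err, strconv.ErrSyntax)) // true
}

Since the wrap uses %w rather than %v, existing errors.Is or errors.As checks against strconv or encoding/json error types keep working; the only observable change is the field-name prefix on the message.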
@@ -70,7 +71,7 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,14 +92,14 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { o := NewCompletionContext() err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } s.Contexts[key] = append(s.Contexts[key], *o) default: o := []CompletionContext{} err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } s.Contexts[key] = o } @@ -106,17 +107,17 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "fuzzy": if err := dec.Decode(&s.Fuzzy); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzzy", err) } case "regex": if err := dec.Decode(&s.Regex); err != nil { - return err + return fmt.Errorf("%s | %w", "Regex", err) } case "size": @@ -127,7 +128,7 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -142,7 +143,7 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SkipDuplicates", err) } s.SkipDuplicates = &value case bool: diff --git a/typedapi/types/completionsuggestoption.go b/typedapi/types/completionsuggestoption.go index 53a5ada62f..437b7b9d40 100644 --- a/typedapi/types/completionsuggestoption.go +++ b/typedapi/types/completionsuggestoption.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CompletionSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L73-L84 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L73-L84 type CompletionSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Contexts map[string][]Context `json:"contexts,omitempty"` @@ -66,7 +67,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollateMatch", err) } s.CollateMatch = &value case bool: @@ -78,7 +79,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { s.Contexts = make(map[string][]Context, 0) } if err := dec.Decode(&s.Contexts); err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } case "fields": @@ -86,13 +87,13 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,12 +104,12 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "_routing": if err := dec.Decode(&s.Routing_); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } case "score": @@ -118,7 +119,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } f := Float64(value) s.Score = &f @@ -134,7 +135,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score_", err) } f := Float64(value) s.Score_ = &f @@ -145,13 +146,13 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/componenttemplatenode.go b/typedapi/types/componenttemplatenode.go index 15f90e17fd..6222efaf5f 100644 --- a/typedapi/types/componenttemplatenode.go +++ b/typedapi/types/componenttemplatenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ComponentTemplateNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/_types/ComponentTemplate.ts#L35-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/_types/ComponentTemplate.ts#L35-L40 type ComponentTemplateNode struct { Meta_ Metadata `json:"_meta,omitempty"` Template ComponentTemplateSummary `json:"template"` @@ -53,17 +54,17 @@ func (s *ComponentTemplateNode) UnmarshalJSON(data []byte) error { case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/componenttemplatesummary.go b/typedapi/types/componenttemplatesummary.go index 33c900a88c..905ee24b65 100644 --- a/typedapi/types/componenttemplatesummary.go +++ b/typedapi/types/componenttemplatesummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ComponentTemplateSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/_types/ComponentTemplate.ts#L42-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/_types/ComponentTemplate.ts#L42-L54 type ComponentTemplateSummary struct { Aliases map[string]AliasDefinition `json:"aliases,omitempty"` Lifecycle *DataStreamLifecycleWithRollover `json:"lifecycle,omitempty"` @@ -59,22 +60,22 @@ func (s *ComponentTemplateSummary) UnmarshalJSON(data []byte) error { s.Aliases = make(map[string]AliasDefinition, 0) } if err := dec.Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } case "lifecycle": if err := dec.Decode(&s.Lifecycle); err != nil { - return err + return fmt.Errorf("%s | %w", "Lifecycle", err) } case "mappings": if err := dec.Decode(&s.Mappings); err != nil { - return err + return fmt.Errorf("%s | %w", "Mappings", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "settings": @@ -82,12 +83,12 @@ func (s *ComponentTemplateSummary) UnmarshalJSON(data []byte) error { s.Settings = make(map[string]IndexSettings, 0) } if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/compositeaggregate.go b/typedapi/types/compositeaggregate.go index 082a9c69bf..8f47896a20 100644 --- a/typedapi/types/compositeaggregate.go +++ b/typedapi/types/compositeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // CompositeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L618-L623 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L618-L623 type CompositeAggregate struct { AfterKey CompositeAggregateKey `json:"after_key,omitempty"` Buckets BucketsCompositeBucket `json:"buckets"` @@ -53,7 +54,7 @@ func (s *CompositeAggregate) UnmarshalJSON(data []byte) error { case "after_key": if err := dec.Decode(&s.AfterKey); err != nil { - return err + return fmt.Errorf("%s | %w", "AfterKey", err) } case "buckets": @@ -66,20 +67,20 @@ func (s *CompositeAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]CompositeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []CompositeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/compositeaggregatekey.go b/typedapi/types/compositeaggregatekey.go index 7f2c6a641f..0ef7d50bcd 100644 --- a/typedapi/types/compositeaggregatekey.go +++ b/typedapi/types/compositeaggregatekey.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // CompositeAggregateKey type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L118-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L118-L118 type CompositeAggregateKey map[string]FieldValue diff --git a/typedapi/types/compositeaggregation.go b/typedapi/types/compositeaggregation.go index 82ab1749fb..ef5b37d76f 100644 --- a/typedapi/types/compositeaggregation.go +++ b/typedapi/types/compositeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CompositeAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L120-L136 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L120-L136 type CompositeAggregation struct { // After When paginating, use the `after_key` value returned in the previous response // to retrieve the next page. @@ -61,18 +62,18 @@ func (s *CompositeAggregation) UnmarshalJSON(data []byte) error { case "after": if err := dec.Decode(&s.After); err != nil { - return err + return fmt.Errorf("%s | %w", "After", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,7 +90,7 @@ func (s *CompositeAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -99,7 +100,7 @@ func (s *CompositeAggregation) UnmarshalJSON(data []byte) error { case "sources": if err := dec.Decode(&s.Sources); err != nil { - return err + return fmt.Errorf("%s | %w", "Sources", err) } } diff --git a/typedapi/types/compositeaggregationsource.go b/typedapi/types/compositeaggregationsource.go index 7b8af99cb4..0db70305be 100644 --- a/typedapi/types/compositeaggregationsource.go +++ b/typedapi/types/compositeaggregationsource.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // CompositeAggregationSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L138-L155 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L138-L155 type CompositeAggregationSource struct { // DateHistogram A date histogram aggregation. DateHistogram *CompositeDateHistogramAggregation `json:"date_histogram,omitempty"` diff --git a/typedapi/types/compositebucket.go b/typedapi/types/compositebucket.go index 050a5bfa16..8c8244c511 100644 --- a/typedapi/types/compositebucket.go +++ b/typedapi/types/compositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // CompositeBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L625-L627 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L625-L627 type CompositeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } default: @@ -88,490 +88,490 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | 
%w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/compositedatehistogramaggregation.go b/typedapi/types/compositedatehistogramaggregation.go index 6ddf67a4d1..4959facf33 100644 --- a/typedapi/types/compositedatehistogramaggregation.go +++ b/typedapi/types/compositedatehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // CompositeDateHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L174-L182 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L174-L182 type CompositeDateHistogramAggregation struct { // CalendarInterval Either `calendar_interval` or `fixed_interval` must be present CalendarInterval *string `json:"calendar_interval,omitempty"` @@ -70,23 +71,23 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "calendar_interval": if err := dec.Decode(&s.CalendarInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarInterval", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "fixed_interval": if err := dec.Decode(&s.FixedInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "FixedInterval", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -102,7 +103,7 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingBucket", err) } s.MissingBucket = &value case bool: @@ -111,23 +112,23 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "missing_order": if err := dec.Decode(&s.MissingOrder); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingOrder", err) } case "offset": if err := dec.Decode(&s.Offset); err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -136,7 +137,7 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -145,7 +146,7 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -153,7 +154,7 @@ func (s 
*CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -162,12 +163,12 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } case "value_type": if err := dec.Decode(&s.ValueType); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } } diff --git a/typedapi/types/compositegeotilegridaggregation.go b/typedapi/types/compositegeotilegridaggregation.go index 57ebf5f3b1..16da00a652 100644 --- a/typedapi/types/compositegeotilegridaggregation.go +++ b/typedapi/types/compositegeotilegridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // CompositeGeoTileGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L184-L187 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L184-L187 type CompositeGeoTileGridAggregation struct { Bounds GeoBounds `json:"bounds,omitempty"` // Field Either `field` or `script` must be present @@ -65,12 +66,12 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "bounds": if err := dec.Decode(&s.Bounds); err != nil { - return err + return fmt.Errorf("%s | %w", "Bounds", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing_bucket": @@ -80,7 +81,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingBucket", err) } s.MissingBucket = &value case bool: @@ -89,12 +90,12 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "missing_order": if err := dec.Decode(&s.MissingOrder); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingOrder", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "precision": @@ -105,7 +106,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } s.Precision = &value case float64: @@ -116,7 +117,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -125,7 +126,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return 
fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -134,7 +135,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -142,7 +143,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -151,7 +152,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "value_type": if err := dec.Decode(&s.ValueType); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } } diff --git a/typedapi/types/compositehistogramaggregation.go b/typedapi/types/compositehistogramaggregation.go index 320207244c..d7a9e62657 100644 --- a/typedapi/types/compositehistogramaggregation.go +++ b/typedapi/types/compositehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // CompositeHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L170-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L170-L172 type CompositeHistogramAggregation struct { // Field Either `field` or `script` must be present Field *string `json:"field,omitempty"` @@ -64,7 +65,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "interval": @@ -74,7 +75,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } f := Float64(value) s.Interval = f @@ -90,7 +91,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingBucket", err) } s.MissingBucket = &value case bool: @@ -99,18 +100,18 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { case "missing_order": if err := dec.Decode(&s.MissingOrder); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingOrder", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -119,7 +120,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", 
"Script", err) } switch t { @@ -128,7 +129,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -136,7 +137,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -145,7 +146,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { case "value_type": if err := dec.Decode(&s.ValueType); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } } diff --git a/typedapi/types/compositetermsaggregation.go b/typedapi/types/compositetermsaggregation.go index 72ef8f7478..1ceeabf494 100644 --- a/typedapi/types/compositetermsaggregation.go +++ b/typedapi/types/compositetermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // CompositeTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L168-L168 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L168-L168 type CompositeTermsAggregation struct { // Field Either `field` or `script` must be present Field *string `json:"field,omitempty"` @@ -63,7 +64,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing_bucket": @@ -73,7 +74,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingBucket", err) } s.MissingBucket = &value case bool: @@ -82,18 +83,18 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { case "missing_order": if err := dec.Decode(&s.MissingOrder); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingOrder", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -102,7 +103,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -111,7 +112,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -119,7 
+120,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -128,7 +129,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { case "value_type": if err := dec.Decode(&s.ValueType); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } } diff --git a/typedapi/types/conditiontokenfilter.go b/typedapi/types/conditiontokenfilter.go index 232390ca10..a1e94c3885 100644 --- a/typedapi/types/conditiontokenfilter.go +++ b/typedapi/types/conditiontokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ConditionTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L181-L185 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L181-L185 type ConditionTokenFilter struct { Filter []string `json:"filter"` Script Script `json:"script"` @@ -54,13 +55,13 @@ func (s *ConditionTokenFilter) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -69,7 +70,7 @@ func (s *ConditionTokenFilter) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -78,7 +79,7 @@ func (s *ConditionTokenFilter) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -86,7 +87,7 @@ func (s *ConditionTokenFilter) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -95,12 +96,12 @@ func (s *ConditionTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/configuration.go b/typedapi/types/configuration.go index 0f3bc538d6..4d54a8cee0 100644 --- a/typedapi/types/configuration.go +++ b/typedapi/types/configuration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Configuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L99-L129 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L99-L129 type Configuration struct { // FeatureStates A list of feature states to be included in this snapshot. A list of features // available for inclusion in the snapshot and their descriptions be can be @@ -80,7 +81,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { case "feature_states": if err := dec.Decode(&s.FeatureStates); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureStates", err) } case "ignore_unavailable": @@ -90,7 +91,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnavailable", err) } s.IgnoreUnavailable = &value case bool: @@ -104,7 +105,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeGlobalState", err) } s.IncludeGlobalState = &value case bool: @@ -117,19 +118,19 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "partial": @@ -139,7 +140,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Partial", err) } s.Partial = &value case bool: diff --git a/typedapi/types/configurations.go b/typedapi/types/configurations.go index aef4f8c080..13115025ab 100644 --- a/typedapi/types/configurations.go +++ b/typedapi/types/configurations.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Configurations type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/_types/Phase.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/_types/Phase.ts#L50-L54 type Configurations struct { Forcemerge *ForceMergeConfiguration `json:"forcemerge,omitempty"` Rollover *RolloverConditions `json:"rollover,omitempty"` diff --git a/typedapi/types/confusionmatrixitem.go b/typedapi/types/confusionmatrixitem.go index 5d1ca7cf82..1d67d263b7 100644 --- a/typedapi/types/confusionmatrixitem.go +++ b/typedapi/types/confusionmatrixitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ConfusionMatrixItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L125-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L125-L130 type ConfusionMatrixItem struct { ActualClass string `json:"actual_class"` ActualClassDocCount int `json:"actual_class_doc_count"` @@ -55,7 +56,7 @@ func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { case "actual_class": if err := dec.Decode(&s.ActualClass); err != nil { - return err + return fmt.Errorf("%s | %w", "ActualClass", err) } case "actual_class_doc_count": @@ -66,7 +67,7 @@ func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ActualClassDocCount", err) } s.ActualClassDocCount = value case float64: @@ -82,7 +83,7 @@ func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OtherPredictedClassDocCount", err) } s.OtherPredictedClassDocCount = value case float64: @@ -92,7 +93,7 @@ func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { case "predicted_classes": if err := dec.Decode(&s.PredictedClasses); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedClasses", err) } } diff --git a/typedapi/types/confusionmatrixprediction.go b/typedapi/types/confusionmatrixprediction.go index 951e1b43ac..bb23f75115 100644 --- a/typedapi/types/confusionmatrixprediction.go +++ b/typedapi/types/confusionmatrixprediction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ConfusionMatrixPrediction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L132-L135 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L132-L135 type ConfusionMatrixPrediction struct { Count int `json:"count"` PredictedClass string `json:"predicted_class"` @@ -59,7 +60,7 @@ func (s *ConfusionMatrixPrediction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -69,7 +70,7 @@ func (s *ConfusionMatrixPrediction) UnmarshalJSON(data []byte) error { case "predicted_class": if err := dec.Decode(&s.PredictedClass); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedClass", err) } } diff --git a/typedapi/types/confusionmatrixthreshold.go b/typedapi/types/confusionmatrixthreshold.go index 73a87e73c8..7ce540d296 100644 --- a/typedapi/types/confusionmatrixthreshold.go +++ b/typedapi/types/confusionmatrixthreshold.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ConfusionMatrixThreshold type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L137-L158 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L137-L158 type ConfusionMatrixThreshold struct { // FalseNegative False Negative FalseNegative int `json:"fn"` @@ -65,7 +66,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FalseNegative", err) } s.FalseNegative = value case float64: @@ -81,7 +82,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FalsePositive", err) } s.FalsePositive = value case float64: @@ -97,7 +98,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrueNegative", err) } s.TrueNegative = value case float64: @@ -113,7 +114,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TruePositive", err) } s.TruePositive = value case float64: diff --git a/typedapi/types/connection.go b/typedapi/types/connection.go index f3e3c19702..1c77fd39d5 100644 --- a/typedapi/types/connection.go +++ b/typedapi/types/connection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Connection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/Connection.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/Connection.ts#L22-L27 type Connection struct { DocCount int64 `json:"doc_count"` Source int64 `json:"source"` @@ -60,7 +61,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -75,7 +76,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } s.Source = value case float64: @@ -90,7 +91,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Target", err) } s.Target = value case float64: @@ -105,7 +106,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Weight", err) } f := Float64(value) s.Weight = f diff --git a/typedapi/types/constantkeywordproperty.go b/typedapi/types/constantkeywordproperty.go index 020b5a613c..22cad3679c 100644 --- a/typedapi/types/constantkeywordproperty.go +++ b/typedapi/types/constantkeywordproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ConstantKeywordProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L44-L47 type ConstantKeywordProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -61,7 +62,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -379,7 +380,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -392,7 +393,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -704,12 +705,12 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/constantscorequery.go b/typedapi/types/constantscorequery.go index ad662fea0f..f3e6827c29 100644 --- a/typedapi/types/constantscorequery.go +++ b/typedapi/types/constantscorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ConstantScoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L69-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L69-L76 type ConstantScoreQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -68,7 +69,7 @@ func (s *ConstantScoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -79,13 +80,13 @@ func (s *ConstantScoreQuery) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/context.go b/typedapi/types/context.go index e94e1d0142..68ffd0c95f 100644 --- a/typedapi/types/context.go +++ b/typedapi/types/context.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // GeoLocation // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L225-L230 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L225-L230 type Context interface{} diff --git a/typedapi/types/contextmethod.go b/typedapi/types/contextmethod.go index de5ddfee04..b31a67b68b 100644 --- a/typedapi/types/contextmethod.go +++ b/typedapi/types/contextmethod.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ContextMethod type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script_context/types.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script_context/types.ts#L27-L31 type ContextMethod struct { Name string `json:"name"` Params []ContextMethodParam `json:"params"` @@ -54,18 +55,18 @@ func (s *ContextMethod) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "params": if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "return_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ReturnType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/contextmethodparam.go b/typedapi/types/contextmethodparam.go index 3cba2fa70c..e883452b19 100644 --- a/typedapi/types/contextmethodparam.go +++ b/typedapi/types/contextmethodparam.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ContextMethodParam type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script_context/types.ts#L33-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script_context/types.ts#L33-L36 type ContextMethodParam struct { Name string `json:"name"` Type string `json:"type"` @@ -53,13 +54,13 @@ func (s *ContextMethodParam) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/convertprocessor.go b/typedapi/types/convertprocessor.go index d519a7f328..6552e2257f 100644 --- a/typedapi/types/convertprocessor.go +++ b/typedapi/types/convertprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ConvertProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L445-L465 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L445-L465 type ConvertProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -76,7 +77,7 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,13 +88,13 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -123,7 +124,7 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -132,13 +133,13 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -149,12 +150,12 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/coordinatorstats.go b/typedapi/types/coordinatorstats.go index 43d51d72fa..e5243d4eb2 100644 --- a/typedapi/types/coordinatorstats.go +++ b/typedapi/types/coordinatorstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CoordinatorStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/stats/types.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/stats/types.ts#L29-L35 type CoordinatorStats struct { ExecutedSearchesTotal int64 `json:"executed_searches_total"` NodeId string `json:"node_id"` @@ -61,7 +62,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutedSearchesTotal", err) } s.ExecutedSearchesTotal = value case float64: @@ -71,7 +72,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "queue_size": @@ -82,7 +83,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueSize", err) } s.QueueSize = value case float64: @@ -98,7 +99,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteRequestsCurrent", err) } s.RemoteRequestsCurrent = value case float64: @@ -113,7 +114,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteRequestsTotal", err) } s.RemoteRequestsTotal = value case float64: diff --git a/typedapi/types/coordsgeobounds.go b/typedapi/types/coordsgeobounds.go index 4805591cec..e4d8ff51ae 100644 --- a/typedapi/types/coordsgeobounds.go +++ b/typedapi/types/coordsgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CoordsGeoBounds type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L154-L159 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L154-L159 type CoordsGeoBounds struct { Bottom Float64 `json:"bottom"` Left Float64 `json:"left"` @@ -60,7 +61,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Bottom", err) } f := Float64(value) s.Bottom = f @@ -76,7 +77,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Left", err) } f := Float64(value) s.Left = f @@ -92,7 +93,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Right", err) } f := Float64(value) s.Right = f @@ -108,7 +109,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Top", err) } f := Float64(value) s.Top = f diff --git a/typedapi/types/coreknnquery.go b/typedapi/types/coreknnquery.go index b327c4e0e9..4ecfa899e0 100644 --- a/typedapi/types/coreknnquery.go +++ b/typedapi/types/coreknnquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CoreKnnQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/knn_search/_types/Knn.ts#L24-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/knn_search/_types/Knn.ts#L24-L33 type CoreKnnQuery struct { // Field The name of the vector field to search against Field string `json:"field"` @@ -59,7 +60,7 @@ func (s *CoreKnnQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "k": @@ -69,7 +70,7 @@ func (s *CoreKnnQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = value case float64: @@ -84,7 +85,7 @@ func (s *CoreKnnQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumCandidates", err) } s.NumCandidates = value case float64: @@ -94,7 +95,7 @@ func (s *CoreKnnQuery) UnmarshalJSON(data []byte) error { case "query_vector": if err := dec.Decode(&s.QueryVector); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryVector", err) } } diff --git a/typedapi/types/counter.go b/typedapi/types/counter.go index 2952e85d26..8ed9b71348 100644 --- a/typedapi/types/counter.go +++ b/typedapi/types/counter.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Counter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L35-L38 type Counter struct { Active int64 `json:"active"` Total int64 `json:"total"` @@ -58,7 +59,7 @@ func (s *Counter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Active", err) } s.Active = value case float64: @@ -73,7 +74,7 @@ func (s *Counter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/countrecord.go b/typedapi/types/countrecord.go index de69494fcd..318ac3e8c9 100644 --- a/typedapi/types/countrecord.go +++ b/typedapi/types/countrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CountRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/count/types.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/count/types.ts#L23-L39 type CountRecord struct { // Count the document count Count *string `json:"count,omitempty"` @@ -58,7 +59,7 @@ func (s *CountRecord) UnmarshalJSON(data []byte) error { case "count", "dc", "docs.count", "docsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,12 +70,12 @@ func (s *CountRecord) UnmarshalJSON(data []byte) error { case "epoch", "t", "time": if err := dec.Decode(&s.Epoch); err != nil { - return err + return fmt.Errorf("%s | %w", "Epoch", err) } case "timestamp", "ts", "hms", "hhmmss": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/cpu.go b/typedapi/types/cpu.go index 8f2bc2f08f..fd80c63a8d 100644 --- a/typedapi/types/cpu.go +++ b/typedapi/types/cpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Cpu type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L539-L548 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L539-L548 type Cpu struct { LoadAverage map[string]Float64 `json:"load_average,omitempty"` Percent *int `json:"percent,omitempty"` @@ -62,7 +63,7 @@ func (s *Cpu) UnmarshalJSON(data []byte) error { s.LoadAverage = make(map[string]Float64, 0) } if err := dec.Decode(&s.LoadAverage); err != nil { - return err + return fmt.Errorf("%s | %w", "LoadAverage", err) } case "percent": @@ -73,7 +74,7 @@ func (s *Cpu) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Percent", err) } s.Percent = &value case float64: @@ -83,32 +84,32 @@ func (s *Cpu) UnmarshalJSON(data []byte) error { case "sys": if err := dec.Decode(&s.Sys); err != nil { - return err + return fmt.Errorf("%s | %w", "Sys", err) } case "sys_in_millis": if err := dec.Decode(&s.SysInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "SysInMillis", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } case "total_in_millis": if err := dec.Decode(&s.TotalInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInMillis", err) } case "user": if err := dec.Decode(&s.User); err != nil { - return err + return fmt.Errorf("%s | %w", "User", err) } case "user_in_millis": if err := dec.Decode(&s.UserInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "UserInMillis", err) } } diff --git a/typedapi/types/cpuacct.go b/typedapi/types/cpuacct.go index 1c67b21960..eb08da210f 100644 --- a/typedapi/types/cpuacct.go +++ b/typedapi/types/cpuacct.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CpuAcct type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L476-L485 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L476-L485 type CpuAcct struct { // ControlGroup The `cpuacct` control group to which the Elasticsearch process belongs. ControlGroup *string `json:"control_group,omitempty"` @@ -57,7 +58,7 @@ func (s *CpuAcct) UnmarshalJSON(data []byte) error { case "control_group": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ControlGroup", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *CpuAcct) UnmarshalJSON(data []byte) error { case "usage_nanos": if err := dec.Decode(&s.UsageNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "UsageNanos", err) } } diff --git a/typedapi/types/createdstatus.go b/typedapi/types/createdstatus.go index 5922c6fd05..70fc866919 100644 --- a/typedapi/types/createdstatus.go +++ b/typedapi/types/createdstatus.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CreatedStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/CreatedStatus.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/CreatedStatus.ts#L20-L22 type CreatedStatus struct { Created bool `json:"created"` } @@ -57,7 +58,7 @@ func (s *CreatedStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Created", err) } s.Created = value case bool: diff --git a/typedapi/types/createoperation.go b/typedapi/types/createoperation.go index 24dee95df0..0ead85e116 100644 --- a/typedapi/types/createoperation.go +++ b/typedapi/types/createoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CreateOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L130-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L130-L130 type CreateOperation struct { // DynamicTemplates A map from the full name of fields to the name of dynamic templates. // Defaults to an empty map. 
@@ -81,12 +82,12 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { s.DynamicTemplates = make(map[string]string, 0) } if err := dec.Decode(&s.DynamicTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicTemplates", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "if_primary_term": @@ -96,7 +97,7 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IfPrimaryTerm", err) } s.IfPrimaryTerm = &value case float64: @@ -106,18 +107,18 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { case "if_seq_no": if err := dec.Decode(&s.IfSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "IfSeqNo", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "pipeline": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -133,7 +134,7 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequireAlias", err) } s.RequireAlias = &value case bool: @@ -142,17 +143,17 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/csvprocessor.go b/typedapi/types/csvprocessor.go index c442b6a8dc..98445d08dc 100644 --- a/typedapi/types/csvprocessor.go +++ b/typedapi/types/csvprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CsvProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L467-L500 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L467-L500 type CsvProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -82,7 +83,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,18 +94,18 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case "empty_value": if err := dec.Decode(&s.EmptyValue); err != nil { - return err + return fmt.Errorf("%s | %w", "EmptyValue", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,7 +121,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -134,7 +135,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -143,13 +144,13 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "quote": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Quote", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -161,7 +162,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -173,7 +174,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,13 +189,13 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetFields", err) } s.TargetFields = append(s.TargetFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.TargetFields); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetFields", err) } } @@ -205,7 +206,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Trim", err) } s.Trim = &value case bool: diff --git a/typedapi/types/cumulativecardinalityaggregate.go b/typedapi/types/cumulativecardinalityaggregate.go index ce2d619bd1..abd12b3c69 100644 --- a/typedapi/types/cumulativecardinalityaggregate.go +++ b/typedapi/types/cumulativecardinalityaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CumulativeCardinalityAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L747-L755 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L747-L755 type CumulativeCardinalityAggregate struct { Meta Metadata `json:"meta,omitempty"` Value int64 `json:"value"` @@ -54,7 +55,7 @@ func (s *CumulativeCardinalityAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": @@ -64,7 +65,7 @@ func (s *CumulativeCardinalityAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } s.Value = value case float64: @@ -75,7 +76,7 @@ func (s *CumulativeCardinalityAggregate) UnmarshalJSON(data []byte) error { case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/cumulativecardinalityaggregation.go b/typedapi/types/cumulativecardinalityaggregation.go index 637889589c..2a3a135cd7 100644 --- a/typedapi/types/cumulativecardinalityaggregation.go +++ b/typedapi/types/cumulativecardinalityaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CumulativeCardinalityAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L192-L192 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L192-L192 type CumulativeCardinalityAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/cumulativesumaggregation.go b/typedapi/types/cumulativesumaggregation.go index 669743319f..ef6192cb32 100644 --- a/typedapi/types/cumulativesumaggregation.go +++ b/typedapi/types/cumulativesumaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // CumulativeSumAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L194-L194 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L194-L194 type CumulativeSumAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/currentnode.go b/typedapi/types/currentnode.go index 0ead7bc4cf..1645292d19 100644 --- a/typedapi/types/currentnode.go +++ b/typedapi/types/currentnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CurrentNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L78-L84 type CurrentNode struct { Attributes map[string]string `json:"attributes"` Id string `json:"id"` @@ -59,22 +60,22 @@ func (s *CurrentNode) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } case "weight_ranking": @@ -85,7 +86,7 @@ func (s *CurrentNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WeightRanking", err) } s.WeightRanking = value case float64: diff --git a/typedapi/types/customanalyzer.go b/typedapi/types/customanalyzer.go index 7d9e1396f4..f0792fa826 100644 --- a/typedapi/types/customanalyzer.go +++ b/typedapi/types/customanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CustomAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L28-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L28-L35 type CustomAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` @@ -57,12 +58,12 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case "char_filter": if err := dec.Decode(&s.CharFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "CharFilter", err) } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "position_increment_gap": @@ -73,7 +74,7 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositionIncrementGap", err) } s.PositionIncrementGap = &value case float64: @@ -89,7 +90,7 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositionOffsetGap", err) } s.PositionOffsetGap = &value case float64: @@ -100,7 +101,7 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case "tokenizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -111,7 +112,7 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/customcategorizetextanalyzer.go b/typedapi/types/customcategorizetextanalyzer.go index 8b95c8b4f5..2db3aedb10 100644 --- a/typedapi/types/customcategorizetextanalyzer.go +++ b/typedapi/types/customcategorizetextanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // CustomCategorizeTextAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1108-L1112 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1108-L1112 type CustomCategorizeTextAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` @@ -54,18 +55,18 @@ func (s *CustomCategorizeTextAnalyzer) UnmarshalJSON(data []byte) error { case "char_filter": if err := dec.Decode(&s.CharFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "CharFilter", err) } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "tokenizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/customnormalizer.go b/typedapi/types/customnormalizer.go index f50f6dcfd6..c9be4830da 100644 --- a/typedapi/types/customnormalizer.go +++ b/typedapi/types/customnormalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // CustomNormalizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/normalizers.ts#L30-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/normalizers.ts#L30-L34 type CustomNormalizer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` diff --git a/typedapi/types/dailyschedule.go b/typedapi/types/dailyschedule.go index c70011168c..557aae1d83 100644 --- a/typedapi/types/dailyschedule.go +++ b/typedapi/types/dailyschedule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DailySchedule type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L33-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L33-L35 type DailySchedule struct { At []ScheduleTimeOfDay `json:"at"` } diff --git a/typedapi/types/danglingindex.go b/typedapi/types/danglingindex.go index c152629015..35b5b09463 100644 --- a/typedapi/types/danglingindex.go +++ b/typedapi/types/danglingindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DanglingIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L29-L34 type DanglingIndex struct { CreationDateMillis int64 `json:"creation_date_millis"` IndexName string `json:"index_name"` @@ -55,13 +56,13 @@ func (s *DanglingIndex) UnmarshalJSON(data []byte) error { case "creation_date_millis": if err := dec.Decode(&s.CreationDateMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "CreationDateMillis", err) } case "index_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *DanglingIndex) UnmarshalJSON(data []byte) error { case "index_uuid": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexUuid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,13 +89,13 @@ func (s *DanglingIndex) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeIds", err) } s.NodeIds = append(s.NodeIds, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.NodeIds); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeIds", err) } } diff --git a/typedapi/types/datacounts.go b/typedapi/types/datacounts.go index 48ac480b54..cbe18cdfda 100644 --- a/typedapi/types/datacounts.go +++ b/typedapi/types/datacounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataCounts type. 
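Editor's note: the node_ids handling in the DanglingIndex hunk above also illustrates how the generated code copes with fields the server may return either as a single string or as an array: it peeks at the raw message and branches on a leading "[". A standalone sketch of that branch using only the standard library follows; the flexStrings name is illustrative.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// flexStrings accepts both "a" and ["a","b"], mirroring the NodeIds handling above.
type flexStrings []string

func (f *flexStrings) UnmarshalJSON(data []byte) error {
	raw := bytes.TrimSpace(data)
	if !bytes.HasPrefix(raw, []byte("[")) {
		var one string
		if err := json.Unmarshal(raw, &one); err != nil {
			return fmt.Errorf("%s | %w", "flexStrings", err)
		}
		*f = append(*f, one)
		return nil
	}
	var many []string
	if err := json.Unmarshal(raw, &many); err != nil {
		return fmt.Errorf("%s | %w", "flexStrings", err)
	}
	*f = append(*f, many...)
	return nil
}

func main() {
	var a, b flexStrings
	_ = json.Unmarshal([]byte(`"node-1"`), &a)
	_ = json.Unmarshal([]byte(`["node-1","node-2"]`), &b)
	fmt.Println(a, b) // [node-1] [node-1 node-2]
}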
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L352-L372 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L352-L372 type DataCounts struct { BucketCount int64 `json:"bucket_count"` EarliestRecordTimestamp *int64 `json:"earliest_record_timestamp,omitempty"` @@ -75,7 +76,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BucketCount", err) } s.BucketCount = value case float64: @@ -90,7 +91,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EarliestRecordTimestamp", err) } s.EarliestRecordTimestamp = &value case float64: @@ -105,7 +106,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EmptyBucketCount", err) } s.EmptyBucketCount = value case float64: @@ -120,7 +121,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InputBytes", err) } s.InputBytes = value case float64: @@ -135,7 +136,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InputFieldCount", err) } s.InputFieldCount = value case float64: @@ -150,7 +151,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InputRecordCount", err) } s.InputRecordCount = value case float64: @@ -165,7 +166,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InvalidDateCount", err) } s.InvalidDateCount = value case float64: @@ -175,7 +176,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "last_data_time": @@ -185,7 +186,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LastDataTime", err) } s.LastDataTime = &value case float64: @@ -200,7 +201,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LatestBucketTimestamp", err) } s.LatestBucketTimestamp = &value case float64: @@ -215,7 +216,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LatestEmptyBucketTimestamp", err) } s.LatestEmptyBucketTimestamp = &value case float64: @@ -230,7 +231,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LatestRecordTimestamp", err) } s.LatestRecordTimestamp = &value case float64: @@ -245,7 +246,7 @@ func (s 
*DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LatestSparseBucketTimestamp", err) } s.LatestSparseBucketTimestamp = &value case float64: @@ -260,7 +261,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LogTime", err) } s.LogTime = &value case float64: @@ -275,7 +276,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingFieldCount", err) } s.MissingFieldCount = value case float64: @@ -290,7 +291,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutOfOrderTimestampCount", err) } s.OutOfOrderTimestampCount = value case float64: @@ -305,7 +306,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessedFieldCount", err) } s.ProcessedFieldCount = value case float64: @@ -320,7 +321,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessedRecordCount", err) } s.ProcessedRecordCount = value case float64: @@ -335,7 +336,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SparseBucketCount", err) } s.SparseBucketCount = value case float64: diff --git a/typedapi/types/datadescription.go b/typedapi/types/datadescription.go index 69ba128cae..c686240797 100644 --- a/typedapi/types/datadescription.go +++ b/typedapi/types/datadescription.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataDescription type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L374-L390 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L374-L390 type DataDescription struct { FieldDelimiter *string `json:"field_delimiter,omitempty"` // Format Only JSON format is supported at this time. 
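Editor's note: the DataCounts hunks above repeat one pattern for every numeric field: the value is first decoded generically, then a type switch accepts either a JSON number (seen as float64) or a quoted string parsed with strconv. A compact sketch of that shape for a single int64-like value follows; flexInt64 is an illustrative name, not a generated type.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// flexInt64 accepts 42 as well as "42", mirroring the bucket_count-style handling above.
type flexInt64 int64

func (n *flexInt64) UnmarshalJSON(data []byte) error {
	var v interface{}
	if err := json.Unmarshal(data, &v); err != nil {
		return fmt.Errorf("%s | %w", "flexInt64", err)
	}
	switch t := v.(type) {
	case string:
		parsed, err := strconv.ParseInt(t, 10, 64)
		if err != nil {
			return fmt.Errorf("%s | %w", "flexInt64", err)
		}
		*n = flexInt64(parsed)
	case float64:
		// encoding/json decodes any JSON number into float64 when the target is interface{}
		*n = flexInt64(t)
	default:
		return fmt.Errorf("flexInt64: unexpected JSON type %T", v)
	}
	return nil
}

func main() {
	var a, b flexInt64
	_ = json.Unmarshal([]byte(`123`), &a)
	_ = json.Unmarshal([]byte(`"456"`), &b)
	fmt.Println(a, b) // 123 456
}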
@@ -67,7 +68,7 @@ func (s *DataDescription) UnmarshalJSON(data []byte) error { case "field_delimiter": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldDelimiter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *DataDescription) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,13 +91,13 @@ func (s *DataDescription) UnmarshalJSON(data []byte) error { case "time_field": if err := dec.Decode(&s.TimeField); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeField", err) } case "time_format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeFormat", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/dataemailattachment.go b/typedapi/types/dataemailattachment.go index 192c4a9caa..031891340c 100644 --- a/typedapi/types/dataemailattachment.go +++ b/typedapi/types/dataemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // DataEmailAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L234-L236 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L234-L236 type DataEmailAttachment struct { Format *dataattachmentformat.DataAttachmentFormat `json:"format,omitempty"` } diff --git a/typedapi/types/datafeedauthorization.go b/typedapi/types/datafeedauthorization.go index 2e6ce3c0f9..0e466cb87d 100644 --- a/typedapi/types/datafeedauthorization.go +++ b/typedapi/types/datafeedauthorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DatafeedAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Authorization.ts#L31-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Authorization.ts#L31-L43 type DatafeedAuthorization struct { // ApiKey If an API key was used for the most recent update to the datafeed, its name // and identifier are listed in the response. 
@@ -60,18 +61,18 @@ func (s *DatafeedAuthorization) UnmarshalJSON(data []byte) error { case "api_key": if err := dec.Decode(&s.ApiKey); err != nil { - return err + return fmt.Errorf("%s | %w", "ApiKey", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "service_account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ServiceAccount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/datafeedconfig.go b/typedapi/types/datafeedconfig.go index a7031ef586..f16d2ebaff 100644 --- a/typedapi/types/datafeedconfig.go +++ b/typedapi/types/datafeedconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DatafeedConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L60-L117 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L60-L117 type DatafeedConfig struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations // is limited and should be used only with low cardinality data. @@ -120,42 +121,42 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "chunking_config": if err := dec.Decode(&s.ChunkingConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ChunkingConfig", err) } case "datafeed_id": if err := dec.Decode(&s.DatafeedId); err != nil { - return err + return fmt.Errorf("%s | %w", "DatafeedId", err) } case "delayed_data_check_config": if err := dec.Decode(&s.DelayedDataCheckConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DelayedDataCheckConfig", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "indices", "indexes": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "indices_options": if err := dec.Decode(&s.IndicesOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesOptions", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "max_empty_searches": @@ -166,7 +167,7 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxEmptySearches", err) } s.MaxEmptySearches = &value case float64: @@ -176,17 +177,17 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "query_delay": if err := dec.Decode(&s.QueryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryDelay", err) } case "runtime_mappings": if err := 
dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -194,7 +195,7 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "scroll_size": @@ -205,7 +206,7 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollSize", err) } s.ScrollSize = &value case float64: diff --git a/typedapi/types/datafeedrunningstate.go b/typedapi/types/datafeedrunningstate.go index 8cdeb354de..f4272f48ce 100644 --- a/typedapi/types/datafeedrunningstate.go +++ b/typedapi/types/datafeedrunningstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DatafeedRunningState type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L198-L212 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L198-L212 type DatafeedRunningState struct { // RealTimeConfigured Indicates if the datafeed is "real-time"; meaning that the datafeed has no // configured `end` time. @@ -66,7 +67,7 @@ func (s *DatafeedRunningState) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RealTimeConfigured", err) } s.RealTimeConfigured = value case bool: @@ -80,7 +81,7 @@ func (s *DatafeedRunningState) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RealTimeRunning", err) } s.RealTimeRunning = value case bool: @@ -89,7 +90,7 @@ func (s *DatafeedRunningState) UnmarshalJSON(data []byte) error { case "search_interval": if err := dec.Decode(&s.SearchInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchInterval", err) } } diff --git a/typedapi/types/datafeeds.go b/typedapi/types/datafeeds.go index 220549c98c..ffbdabe148 100644 --- a/typedapi/types/datafeeds.go +++ b/typedapi/types/datafeeds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Datafeeds type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/info/types.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/info/types.ts#L40-L42 type Datafeeds struct { ScrollSize int `json:"scroll_size"` } @@ -58,7 +59,7 @@ func (s *Datafeeds) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollSize", err) } s.ScrollSize = value case float64: diff --git a/typedapi/types/datafeedsrecord.go b/typedapi/types/datafeedsrecord.go index d9ca0dc5fa..c519487f8c 100644 --- a/typedapi/types/datafeedsrecord.go +++ b/typedapi/types/datafeedsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DatafeedsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_datafeeds/types.ts#L22-L87 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_datafeeds/types.ts#L22-L87 type DatafeedsRecord struct { // AssignmentExplanation For started datafeeds only, contains messages relating to the selection of a // node. @@ -87,7 +88,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "assignment_explanation", "ae": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,7 +100,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "buckets.count", "bc", "bucketsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -111,7 +112,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -123,7 +124,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "node.address", "na", "nodeAddress": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,7 +136,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "node.ephemeral_id", "ne", "nodeEphemeralId": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeEphemeralId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -147,7 +148,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "node.id", "ni", "nodeId": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -159,7 +160,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error 
{ case "node.name", "nn", "nodeName": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -171,7 +172,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "search.bucket_avg", "sba", "searchBucketAvg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchBucketAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -183,7 +184,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "search.count", "sc", "searchCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -195,7 +196,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "search.exp_avg_hour", "seah", "searchExpAvgHour": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchExpAvgHour", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -207,7 +208,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "search.time", "st", "searchTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -218,7 +219,7 @@ func (s *DatafeedsRecord) UnmarshalJSON(data []byte) error { case "state", "s": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } } diff --git a/typedapi/types/datafeedstats.go b/typedapi/types/datafeedstats.go index abcd4c2c9b..8df5bfb37f 100644 --- a/typedapi/types/datafeedstats.go +++ b/typedapi/types/datafeedstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DatafeedStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L140-L169 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L140-L169 type DatafeedStats struct { // AssignmentExplanation For started datafeeds only, contains messages relating to the selection of a // node. 
@@ -74,7 +75,7 @@ func (s *DatafeedStats) UnmarshalJSON(data []byte) error { case "assignment_explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,27 +86,27 @@ func (s *DatafeedStats) UnmarshalJSON(data []byte) error { case "datafeed_id": if err := dec.Decode(&s.DatafeedId); err != nil { - return err + return fmt.Errorf("%s | %w", "DatafeedId", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "running_state": if err := dec.Decode(&s.RunningState); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningState", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "timing_stats": if err := dec.Decode(&s.TimingStats); err != nil { - return err + return fmt.Errorf("%s | %w", "TimingStats", err) } } diff --git a/typedapi/types/datafeedtimingstats.go b/typedapi/types/datafeedtimingstats.go index bed6b2161d..67a6d50858 100644 --- a/typedapi/types/datafeedtimingstats.go +++ b/typedapi/types/datafeedtimingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DatafeedTimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L171-L196 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L171-L196 type DatafeedTimingStats struct { // AverageSearchTimePerBucketMs The average search time per bucket, in milliseconds. 
AverageSearchTimePerBucketMs Float64 `json:"average_search_time_per_bucket_ms,omitempty"` @@ -63,7 +64,7 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { case "average_search_time_per_bucket_ms": if err := dec.Decode(&s.AverageSearchTimePerBucketMs); err != nil { - return err + return fmt.Errorf("%s | %w", "AverageSearchTimePerBucketMs", err) } case "bucket_count": @@ -73,7 +74,7 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BucketCount", err) } s.BucketCount = value case float64: @@ -83,12 +84,12 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { case "exponential_average_search_time_per_hour_ms": if err := dec.Decode(&s.ExponentialAverageSearchTimePerHourMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ExponentialAverageSearchTimePerHourMs", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "search_count": @@ -98,7 +99,7 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SearchCount", err) } s.SearchCount = value case float64: @@ -108,7 +109,7 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { case "total_search_time_ms": if err := dec.Decode(&s.TotalSearchTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSearchTimeMs", err) } } diff --git a/typedapi/types/dataframeanalysis.go b/typedapi/types/dataframeanalysis.go index 38c4469650..79cc1dae35 100644 --- a/typedapi/types/dataframeanalysis.go +++ b/typedapi/types/dataframeanalysis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L134-L213 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L134-L213 type DataframeAnalysis struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -169,7 +170,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := Float64(value) s.Alpha = &f @@ -181,7 +182,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "dependent_variable": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DependentVariable", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -197,7 +198,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DownsampleFactor", err) } f := Float64(value) s.DownsampleFactor = &f @@ -213,7 +214,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EarlyStoppingEnabled", err) } s.EarlyStoppingEnabled = &value case bool: @@ -227,7 +228,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Eta", err) } f := Float64(value) s.Eta = &f @@ -243,7 +244,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EtaGrowthRatePerTree", err) } f := Float64(value) s.EtaGrowthRatePerTree = &f @@ -259,7 +260,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureBagFraction", err) } f := Float64(value) s.FeatureBagFraction = &f @@ -270,7 +271,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "feature_processors": if err := dec.Decode(&s.FeatureProcessors); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureProcessors", err) } case "gamma": @@ -280,7 +281,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gamma", err) } f := Float64(value) s.Gamma = &f @@ -296,7 +297,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lambda", err) } f := Float64(value) s.Lambda = &f @@ -313,7 +314,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOptimizationRoundsPerHyperparameter", err) } s.MaxOptimizationRoundsPerHyperparameter = &value case float64: @@ -329,7 +330,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTrees", err) } s.MaxTrees = &value case float64: @@ -345,7 +346,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -355,7 
+356,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "prediction_field_name": if err := dec.Decode(&s.PredictionFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionFieldName", err) } case "randomize_seed": @@ -365,7 +366,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RandomizeSeed", err) } f := Float64(value) s.RandomizeSeed = &f @@ -382,7 +383,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthLimit", err) } s.SoftTreeDepthLimit = &value case float64: @@ -397,7 +398,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthTolerance", err) } f := Float64(value) s.SoftTreeDepthTolerance = &f @@ -408,7 +409,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "training_percent": if err := dec.Decode(&s.TrainingPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "TrainingPercent", err) } } diff --git a/typedapi/types/dataframeanalysisanalyzedfields.go b/typedapi/types/dataframeanalysisanalyzedfields.go index 291c913ac5..f7407ed774 100644 --- a/typedapi/types/dataframeanalysisanalyzedfields.go +++ b/typedapi/types/dataframeanalysisanalyzedfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeAnalysisAnalyzedFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L238-L244 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L238-L244 type DataframeAnalysisAnalyzedFields struct { // Excludes An array of strings that defines the fields that will be included in the // analysis. @@ -62,12 +63,12 @@ func (s *DataframeAnalysisAnalyzedFields) UnmarshalJSON(data []byte) error { case "excludes": if err := dec.Decode(&s.Excludes); err != nil { - return err + return fmt.Errorf("%s | %w", "Excludes", err) } case "includes": if err := dec.Decode(&s.Includes); err != nil { - return err + return fmt.Errorf("%s | %w", "Includes", err) } } diff --git a/typedapi/types/dataframeanalysisclassification.go b/typedapi/types/dataframeanalysisclassification.go index d22c97de9b..5f7f497c5c 100644 --- a/typedapi/types/dataframeanalysisclassification.go +++ b/typedapi/types/dataframeanalysisclassification.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysisClassification type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L227-L236 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L227-L236 type DataframeAnalysisClassification struct { // Alpha Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -178,7 +179,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := Float64(value) s.Alpha = &f @@ -190,7 +191,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "class_assignment_objective": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassAssignmentObjective", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -202,7 +203,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "dependent_variable": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DependentVariable", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -218,7 +219,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DownsampleFactor", err) } f := Float64(value) s.DownsampleFactor = &f @@ -234,7 +235,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EarlyStoppingEnabled", err) } s.EarlyStoppingEnabled = &value case bool: @@ -248,7 +249,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Eta", err) } f := Float64(value) s.Eta = &f @@ -264,7 +265,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EtaGrowthRatePerTree", err) } f := Float64(value) s.EtaGrowthRatePerTree = &f @@ -280,7 +281,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureBagFraction", err) } f := Float64(value) s.FeatureBagFraction = &f @@ -291,7 +292,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "feature_processors": if err := dec.Decode(&s.FeatureProcessors); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureProcessors", err) } case "gamma": @@ -301,7 +302,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gamma", err) } f := Float64(value) s.Gamma = &f @@ -317,7 +318,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", 
"Lambda", err) } f := Float64(value) s.Lambda = &f @@ -334,7 +335,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOptimizationRoundsPerHyperparameter", err) } s.MaxOptimizationRoundsPerHyperparameter = &value case float64: @@ -350,7 +351,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTrees", err) } s.MaxTrees = &value case float64: @@ -366,7 +367,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -382,7 +383,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -392,7 +393,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "prediction_field_name": if err := dec.Decode(&s.PredictionFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionFieldName", err) } case "randomize_seed": @@ -402,7 +403,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RandomizeSeed", err) } f := Float64(value) s.RandomizeSeed = &f @@ -419,7 +420,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthLimit", err) } s.SoftTreeDepthLimit = &value case float64: @@ -434,7 +435,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthTolerance", err) } f := Float64(value) s.SoftTreeDepthTolerance = &f @@ -445,7 +446,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "training_percent": if err := dec.Decode(&s.TrainingPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "TrainingPercent", err) } } diff --git a/typedapi/types/dataframeanalysiscontainer.go b/typedapi/types/dataframeanalysiscontainer.go index b70f99386c..ea1fdc8b68 100644 --- a/typedapi/types/dataframeanalysiscontainer.go +++ b/typedapi/types/dataframeanalysiscontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeAnalysisContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L84-L101 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L84-L101 type DataframeAnalysisContainer struct { // Classification The configuration information necessary to perform classification. Classification *DataframeAnalysisClassification `json:"classification,omitempty"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessor.go b/typedapi/types/dataframeanalysisfeatureprocessor.go index 7c9b2f8e39..0aa722d812 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessor.go +++ b/typedapi/types/dataframeanalysisfeatureprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeAnalysisFeatureProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L246-L258 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L246-L258 type DataframeAnalysisFeatureProcessor struct { // FrequencyEncoding The configuration information necessary to perform frequency encoding. FrequencyEncoding *DataframeAnalysisFeatureProcessorFrequencyEncoding `json:"frequency_encoding,omitempty"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go b/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go index fe27436a6e..128e5fbff3 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeAnalysisFeatureProcessorFrequencyEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L260-L267 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L260-L267 type DataframeAnalysisFeatureProcessorFrequencyEncoding struct { // FeatureName The resulting feature name. 
FeatureName string `json:"feature_name"` @@ -56,12 +57,12 @@ func (s *DataframeAnalysisFeatureProcessorFrequencyEncoding) UnmarshalJSON(data case "feature_name": if err := dec.Decode(&s.FeatureName); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "frequency_map": @@ -69,7 +70,7 @@ func (s *DataframeAnalysisFeatureProcessorFrequencyEncoding) UnmarshalJSON(data s.FrequencyMap = make(map[string]Float64, 0) } if err := dec.Decode(&s.FrequencyMap); err != nil { - return err + return fmt.Errorf("%s | %w", "FrequencyMap", err) } } diff --git a/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go b/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go index e00c3719b6..954f0d8378 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeAnalysisFeatureProcessorMultiEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L269-L272 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L269-L272 type DataframeAnalysisFeatureProcessorMultiEncoding struct { // Processors The ordered array of custom processors to execute. Must be more than 1. Processors []int `json:"processors"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go b/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go index 392663f6e5..6b8e730bb7 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysisFeatureProcessorNGramEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L274-L286 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L274-L286 type DataframeAnalysisFeatureProcessorNGramEncoding struct { Custom *bool `json:"custom,omitempty"` // FeaturePrefix The feature name prefix. Defaults to ngram__. 
@@ -70,7 +71,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Custom", err) } s.Custom = &value case bool: @@ -80,7 +81,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case "feature_prefix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeaturePrefix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,7 +92,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "length": @@ -102,7 +103,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = &value case float64: @@ -112,7 +113,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case "n_grams": if err := dec.Decode(&s.NGrams); err != nil { - return err + return fmt.Errorf("%s | %w", "NGrams", err) } case "start": @@ -123,7 +124,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } s.Start = &value case float64: diff --git a/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go b/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go index c9f8053836..0d21445752 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysisFeatureProcessorOneHotEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L288-L293 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L288-L293 type DataframeAnalysisFeatureProcessorOneHotEncoding struct { // Field The name of the field to encode. 
Field string `json:"field"` @@ -55,13 +56,13 @@ func (s *DataframeAnalysisFeatureProcessorOneHotEncoding) UnmarshalJSON(data []b case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "hot_map": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HotMap", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go b/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go index 2d4b402cac..0df4ff6818 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysisFeatureProcessorTargetMeanEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L295-L304 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L295-L304 type DataframeAnalysisFeatureProcessorTargetMeanEncoding struct { // DefaultValue The default value if field value is not found in the target_map. DefaultValue int `json:"default_value"` @@ -65,7 +66,7 @@ func (s *DataframeAnalysisFeatureProcessorTargetMeanEncoding) UnmarshalJSON(data case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultValue", err) } s.DefaultValue = value case float64: @@ -75,12 +76,12 @@ func (s *DataframeAnalysisFeatureProcessorTargetMeanEncoding) UnmarshalJSON(data case "feature_name": if err := dec.Decode(&s.FeatureName); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "target_map": @@ -88,7 +89,7 @@ func (s *DataframeAnalysisFeatureProcessorTargetMeanEncoding) UnmarshalJSON(data s.TargetMap = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.TargetMap); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetMap", err) } } diff --git a/typedapi/types/dataframeanalysisoutlierdetection.go b/typedapi/types/dataframeanalysisoutlierdetection.go index b0b927fdbc..5f5ac47bfa 100644 --- a/typedapi/types/dataframeanalysisoutlierdetection.go +++ b/typedapi/types/dataframeanalysisoutlierdetection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysisOutlierDetection type. 
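Editor's note: across all of the types in this diff the change is uniform, which also clarifies what a caller can do with a failed decode: the message now starts with the field name, followed by a " | " separator, and the original cause remains in the error chain. A small consumer-side sketch follows; the cause here is a stand-in literal, not an error actually produced by this code.

package main

import (
	"errors"
	"fmt"
	"strings"
)

func main() {
	// Stand-in for an error returned by one of the generated UnmarshalJSON methods above:
	// field name, " | " separator, wrapped cause.
	cause := errors.New("json: cannot unmarshal string into Go value of type int")
	err := fmt.Errorf("%s | %w", "NNeighbors", cause)

	// The message names the field that failed to decode.
	fmt.Println(err) // NNeighbors | json: cannot unmarshal string into Go value of type int

	// The original error is still reachable through the %w chain.
	fmt.Println(errors.Is(err, cause))       // true
	fmt.Println(errors.Unwrap(err) == cause) // true

	// If only the field name is needed, it is the part before the first " | ".
	field, _, _ := strings.Cut(err.Error(), " | ")
	fmt.Println(field) // NNeighbors
}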
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L103-L132 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L103-L132 type DataframeAnalysisOutlierDetection struct { // ComputeFeatureInfluence Specifies whether the feature influence calculation is enabled. ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` @@ -80,7 +81,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ComputeFeatureInfluence", err) } s.ComputeFeatureInfluence = &value case bool: @@ -94,7 +95,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureInfluenceThreshold", err) } f := Float64(value) s.FeatureInfluenceThreshold = &f @@ -106,7 +107,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case "method": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Method", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -123,7 +124,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NNeighbors", err) } s.NNeighbors = &value case float64: @@ -138,7 +139,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutlierFraction", err) } f := Float64(value) s.OutlierFraction = &f @@ -154,7 +155,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StandardizationEnabled", err) } s.StandardizationEnabled = &value case bool: diff --git a/typedapi/types/dataframeanalysisregression.go b/typedapi/types/dataframeanalysisregression.go index 37256a9e67..4195117279 100644 --- a/typedapi/types/dataframeanalysisregression.go +++ b/typedapi/types/dataframeanalysisregression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalysisRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L215-L225 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L215-L225 type DataframeAnalysisRegression struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -175,7 +176,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := Float64(value) s.Alpha = &f @@ -187,7 +188,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "dependent_variable": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DependentVariable", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -203,7 +204,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DownsampleFactor", err) } f := Float64(value) s.DownsampleFactor = &f @@ -219,7 +220,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EarlyStoppingEnabled", err) } s.EarlyStoppingEnabled = &value case bool: @@ -233,7 +234,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Eta", err) } f := Float64(value) s.Eta = &f @@ -249,7 +250,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EtaGrowthRatePerTree", err) } f := Float64(value) s.EtaGrowthRatePerTree = &f @@ -265,7 +266,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureBagFraction", err) } f := Float64(value) s.FeatureBagFraction = &f @@ -276,7 +277,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "feature_processors": if err := dec.Decode(&s.FeatureProcessors); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureProcessors", err) } case "gamma": @@ -286,7 +287,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gamma", err) } f := Float64(value) s.Gamma = &f @@ -302,7 +303,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lambda", err) } f := Float64(value) s.Lambda = &f @@ -314,7 +315,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "loss_function": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LossFunction", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -330,7 +331,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LossFunctionParameter", err) } f := Float64(value) s.LossFunctionParameter = &f @@ -347,7 +348,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil 
{ - return err + return fmt.Errorf("%s | %w", "MaxOptimizationRoundsPerHyperparameter", err) } s.MaxOptimizationRoundsPerHyperparameter = &value case float64: @@ -363,7 +364,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTrees", err) } s.MaxTrees = &value case float64: @@ -379,7 +380,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -389,7 +390,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "prediction_field_name": if err := dec.Decode(&s.PredictionFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionFieldName", err) } case "randomize_seed": @@ -399,7 +400,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RandomizeSeed", err) } f := Float64(value) s.RandomizeSeed = &f @@ -416,7 +417,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthLimit", err) } s.SoftTreeDepthLimit = &value case float64: @@ -431,7 +432,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthTolerance", err) } f := Float64(value) s.SoftTreeDepthTolerance = &f @@ -442,7 +443,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "training_percent": if err := dec.Decode(&s.TrainingPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "TrainingPercent", err) } } diff --git a/typedapi/types/dataframeanalytics.go b/typedapi/types/dataframeanalytics.go index a319a4c33f..09c7186dd2 100644 --- a/typedapi/types/dataframeanalytics.go +++ b/typedapi/types/dataframeanalytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DataframeAnalytics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L324-L344 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L324-L344 type DataframeAnalytics struct { // AnalysisStats An object containing information about the analysis job. 
AnalysisStats *DataframeAnalyticsStatsContainer `json:"analysis_stats,omitempty"` @@ -74,13 +75,13 @@ func (s *DataframeAnalytics) UnmarshalJSON(data []byte) error { case "analysis_stats": if err := dec.Decode(&s.AnalysisStats); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisStats", err) } case "assignment_explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,32 +92,32 @@ func (s *DataframeAnalytics) UnmarshalJSON(data []byte) error { case "data_counts": if err := dec.Decode(&s.DataCounts); err != nil { - return err + return fmt.Errorf("%s | %w", "DataCounts", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "memory_usage": if err := dec.Decode(&s.MemoryUsage); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryUsage", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "progress": if err := dec.Decode(&s.Progress); err != nil { - return err + return fmt.Errorf("%s | %w", "Progress", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } } diff --git a/typedapi/types/dataframeanalyticsauthorization.go b/typedapi/types/dataframeanalyticsauthorization.go index 88f1a5d49a..5dc53f44ca 100644 --- a/typedapi/types/dataframeanalyticsauthorization.go +++ b/typedapi/types/dataframeanalyticsauthorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Authorization.ts#L45-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Authorization.ts#L45-L57 type DataframeAnalyticsAuthorization struct { // ApiKey If an API key was used for the most recent update to the job, its name and // identifier are listed in the response. @@ -60,18 +61,18 @@ func (s *DataframeAnalyticsAuthorization) UnmarshalJSON(data []byte) error { case "api_key": if err := dec.Decode(&s.ApiKey); err != nil { - return err + return fmt.Errorf("%s | %w", "ApiKey", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "service_account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ServiceAccount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/dataframeanalyticsdestination.go b/typedapi/types/dataframeanalyticsdestination.go index 8b2335cb08..571d419b73 100644 --- a/typedapi/types/dataframeanalyticsdestination.go +++ b/typedapi/types/dataframeanalyticsdestination.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeAnalyticsDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L77-L82 type DataframeAnalyticsDestination struct { // Index Defines the destination index to store the results of the data frame // analytics job. @@ -56,12 +57,12 @@ func (s *DataframeAnalyticsDestination) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "results_field": if err := dec.Decode(&s.ResultsField); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } } diff --git a/typedapi/types/dataframeanalyticsfieldselection.go b/typedapi/types/dataframeanalyticsfieldselection.go index f3b47ff20f..3646c34248 100644 --- a/typedapi/types/dataframeanalyticsfieldselection.go +++ b/typedapi/types/dataframeanalyticsfieldselection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsFieldSelection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L55-L68 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L55-L68 type DataframeAnalyticsFieldSelection struct { // FeatureType The feature type of this field for the analysis. May be categorical or // numerical. 
@@ -65,7 +66,7 @@ func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { case "feature_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsIncluded", err) } s.IsIncluded = value case bool: @@ -95,7 +96,7 @@ func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsRequired", err) } s.IsRequired = value case bool: @@ -104,18 +105,18 @@ func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { case "mapping_types": if err := dec.Decode(&s.MappingTypes); err != nil { - return err + return fmt.Errorf("%s | %w", "MappingTypes", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/dataframeanalyticsmemoryestimation.go b/typedapi/types/dataframeanalyticsmemoryestimation.go index 9bc9fbb459..d2c5163a13 100644 --- a/typedapi/types/dataframeanalyticsmemoryestimation.go +++ b/typedapi/types/dataframeanalyticsmemoryestimation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsMemoryEstimation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L70-L75 type DataframeAnalyticsMemoryEstimation struct { // ExpectedMemoryWithDisk Estimated memory usage under the assumption that overflowing to disk is // allowed during data frame analytics. 
expected_memory_with_disk is usually @@ -60,7 +61,7 @@ func (s *DataframeAnalyticsMemoryEstimation) UnmarshalJSON(data []byte) error { case "expected_memory_with_disk": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpectedMemoryWithDisk", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *DataframeAnalyticsMemoryEstimation) UnmarshalJSON(data []byte) error { case "expected_memory_without_disk": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpectedMemoryWithoutDisk", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/dataframeanalyticsrecord.go b/typedapi/types/dataframeanalyticsrecord.go index 5c14976bae..cc2e7a9300 100644 --- a/typedapi/types/dataframeanalyticsrecord.go +++ b/typedapi/types/dataframeanalyticsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataFrameAnalyticsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_data_frame_analytics/types.ts#L22-L102 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_data_frame_analytics/types.ts#L22-L102 type DataFrameAnalyticsRecord struct { // AssignmentExplanation Messages related to the selection of a node. 
AssignmentExplanation *string `json:"assignment_explanation,omitempty"` @@ -85,7 +86,7 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "assignment_explanation", "ae", "assignmentExplanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,7 +98,7 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "create_time", "ct", "createTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "description", "d": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,13 +121,13 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "dest_index", "di", "destIndex": if err := dec.Decode(&s.DestIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "DestIndex", err) } case "failure_reason", "fr", "failureReason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FailureReason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -137,13 +138,13 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "model_memory_limit", "mml", "modelMemoryLimit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -155,7 +156,7 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "node.address", "na", "nodeAddress": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -166,23 +167,23 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "node.ephemeral_id", "ne", "nodeEphemeralId": if err := dec.Decode(&s.NodeEphemeralId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeEphemeralId", err) } case "node.id", "ni", "nodeId": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "node.name", "nn", "nodeName": if err := dec.Decode(&s.NodeName); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } case "progress", "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Progress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -193,13 +194,13 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "source_index", "si", "sourceIndex": if err := dec.Decode(&s.SourceIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceIndex", err) } case "state", "s": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,7 +212,7 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "type", "t": var tmp 
json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -222,7 +223,7 @@ func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { case "version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/dataframeanalyticssource.go b/typedapi/types/dataframeanalyticssource.go index 7d941efdc9..ee60116214 100644 --- a/typedapi/types/dataframeanalyticssource.go +++ b/typedapi/types/dataframeanalyticssource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeAnalyticsSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L39-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L39-L53 type DataframeAnalyticsSource struct { // Index Index or indices on which to perform the analysis. It can be a single index // or index pattern as well as an array of indices or patterns. NOTE: If your @@ -72,29 +73,29 @@ func (s *DataframeAnalyticsSource) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = append(s.Index, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } } diff --git a/typedapi/types/dataframeanalyticsstatscontainer.go b/typedapi/types/dataframeanalyticsstatscontainer.go index efb2498b0c..b1709ee6b6 100644 --- a/typedapi/types/dataframeanalyticsstatscontainer.go +++ b/typedapi/types/dataframeanalyticsstatscontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeAnalyticsStatsContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L373-L381 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L373-L381 type DataframeAnalyticsStatsContainer struct { // ClassificationStats An object containing information about the classification analysis job. ClassificationStats *DataframeAnalyticsStatsHyperparameters `json:"classification_stats,omitempty"` diff --git a/typedapi/types/dataframeanalyticsstatsdatacounts.go b/typedapi/types/dataframeanalyticsstatsdatacounts.go index 0a75eeb1e5..43b8adae1e 100644 --- a/typedapi/types/dataframeanalyticsstatsdatacounts.go +++ b/typedapi/types/dataframeanalyticsstatsdatacounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsStatsDataCounts type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L364-L371 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L364-L371 type DataframeAnalyticsStatsDataCounts struct { // SkippedDocsCount The number of documents that are skipped during the analysis because they // contained values that are not supported by the analysis. For example, outlier @@ -68,7 +69,7 @@ func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SkippedDocsCount", err) } s.SkippedDocsCount = value case float64: @@ -84,7 +85,7 @@ func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TestDocsCount", err) } s.TestDocsCount = value case float64: @@ -100,7 +101,7 @@ func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrainingDocsCount", err) } s.TrainingDocsCount = value case float64: diff --git a/typedapi/types/dataframeanalyticsstatshyperparameters.go b/typedapi/types/dataframeanalyticsstatshyperparameters.go index 320086cbf6..ed57a292d8 100644 --- a/typedapi/types/dataframeanalyticsstatshyperparameters.go +++ b/typedapi/types/dataframeanalyticsstatshyperparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsStatsHyperparameters type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L383-L402 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L383-L402 type DataframeAnalyticsStatsHyperparameters struct { // Hyperparameters An object containing the parameters of the classification analysis job. Hyperparameters Hyperparameters `json:"hyperparameters"` @@ -62,7 +63,7 @@ func (s *DataframeAnalyticsStatsHyperparameters) UnmarshalJSON(data []byte) erro case "hyperparameters": if err := dec.Decode(&s.Hyperparameters); err != nil { - return err + return fmt.Errorf("%s | %w", "Hyperparameters", err) } case "iteration": @@ -73,7 +74,7 @@ func (s *DataframeAnalyticsStatsHyperparameters) UnmarshalJSON(data []byte) erro case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Iteration", err) } s.Iteration = value case float64: @@ -83,17 +84,17 @@ func (s *DataframeAnalyticsStatsHyperparameters) UnmarshalJSON(data []byte) erro case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "timing_stats": if err := dec.Decode(&s.TimingStats); err != nil { - return err + return fmt.Errorf("%s | %w", "TimingStats", err) } case "validation_loss": if err := dec.Decode(&s.ValidationLoss); err != nil { - return err + return fmt.Errorf("%s | %w", "ValidationLoss", err) } } diff --git a/typedapi/types/dataframeanalyticsstatsmemoryusage.go b/typedapi/types/dataframeanalyticsstatsmemoryusage.go index 5f55222bc8..b29b7d7cdb 100644 --- a/typedapi/types/dataframeanalyticsstatsmemoryusage.go +++ b/typedapi/types/dataframeanalyticsstatsmemoryusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsStatsMemoryUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L353-L362 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L353-L362 type DataframeAnalyticsStatsMemoryUsage struct { // MemoryReestimateBytes This value is present when the status is hard_limit and it is a new estimate // of how much memory the job needs. 
@@ -65,7 +66,7 @@ func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryReestimateBytes", err) } s.MemoryReestimateBytes = &value case float64: @@ -80,7 +81,7 @@ func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PeakUsageBytes", err) } s.PeakUsageBytes = value case float64: @@ -91,7 +92,7 @@ func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { case "status": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -102,7 +103,7 @@ func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/dataframeanalyticsstatsoutlierdetection.go b/typedapi/types/dataframeanalyticsstatsoutlierdetection.go index 95435e9fbf..3eabb7c857 100644 --- a/typedapi/types/dataframeanalyticsstatsoutlierdetection.go +++ b/typedapi/types/dataframeanalyticsstatsoutlierdetection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeAnalyticsStatsOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L404-L417 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L404-L417 type DataframeAnalyticsStatsOutlierDetection struct { // Parameters The list of job parameters specified by the user or determined by algorithmic // heuristics. @@ -58,17 +59,17 @@ func (s *DataframeAnalyticsStatsOutlierDetection) UnmarshalJSON(data []byte) err case "parameters": if err := dec.Decode(&s.Parameters); err != nil { - return err + return fmt.Errorf("%s | %w", "Parameters", err) } case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "timing_stats": if err := dec.Decode(&s.TimingStats); err != nil { - return err + return fmt.Errorf("%s | %w", "TimingStats", err) } } diff --git a/typedapi/types/dataframeanalyticsstatsprogress.go b/typedapi/types/dataframeanalyticsstatsprogress.go index dbea22389e..3454463541 100644 --- a/typedapi/types/dataframeanalyticsstatsprogress.go +++ b/typedapi/types/dataframeanalyticsstatsprogress.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsStatsProgress type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L346-L351 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L346-L351 type DataframeAnalyticsStatsProgress struct { // Phase Defines the phase of the data frame analytics job. Phase string `json:"phase"` @@ -57,7 +58,7 @@ func (s *DataframeAnalyticsStatsProgress) UnmarshalJSON(data []byte) error { case "phase": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Phase", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *DataframeAnalyticsStatsProgress) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ProgressPercent", err) } s.ProgressPercent = value case float64: diff --git a/typedapi/types/dataframeanalyticssummary.go b/typedapi/types/dataframeanalyticssummary.go index 2f235cd4bc..d1d1a3e8b8 100644 --- a/typedapi/types/dataframeanalyticssummary.go +++ b/typedapi/types/dataframeanalyticssummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeAnalyticsSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L306-L322 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L306-L322 type DataframeAnalyticsSummary struct { AllowLazyStart *bool `json:"allow_lazy_start,omitempty"` Analysis DataframeAnalysisContainer `json:"analysis"` @@ -71,7 +72,7 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLazyStart", err) } s.AllowLazyStart = &value case bool: @@ -80,28 +81,28 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case "analysis": if err := dec.Decode(&s.Analysis); err != nil { - return err + return fmt.Errorf("%s | %w", "Analysis", err) } case "analyzed_fields": if err := dec.Decode(&s.AnalyzedFields); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyzedFields", err) } case "authorization": if err := dec.Decode(&s.Authorization); err != nil { - return err + return fmt.Errorf("%s | %w", "Authorization", err) } case "create_time": if err := dec.Decode(&s.CreateTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,12 +113,12 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "max_num_threads": @@ -128,7 +129,7 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNumThreads", err) } s.MaxNumThreads = &value case float64: @@ -139,7 +140,7 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case "model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -150,12 +151,12 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/dataframeclassificationsummary.go b/typedapi/types/dataframeclassificationsummary.go index f743822f7c..9c721088d5 100644 --- a/typedapi/types/dataframeclassificationsummary.go +++ b/typedapi/types/dataframeclassificationsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeClassificationSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L44-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L44-L66 type DataframeClassificationSummary struct { // Accuracy Accuracy of predictions (per-class and overall). Accuracy *DataframeClassificationSummaryAccuracy `json:"accuracy,omitempty"` diff --git a/typedapi/types/dataframeclassificationsummaryaccuracy.go b/typedapi/types/dataframeclassificationsummaryaccuracy.go index 49e5bfc6ed..f0adb534fa 100644 --- a/typedapi/types/dataframeclassificationsummaryaccuracy.go +++ b/typedapi/types/dataframeclassificationsummaryaccuracy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeClassificationSummaryAccuracy type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L111-L114 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L111-L114 type DataframeClassificationSummaryAccuracy struct { Classes []DataframeEvaluationClass `json:"classes"` OverallAccuracy Float64 `json:"overall_accuracy"` @@ -53,7 +54,7 @@ func (s *DataframeClassificationSummaryAccuracy) UnmarshalJSON(data []byte) erro case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } case "overall_accuracy": @@ -63,7 +64,7 @@ func (s *DataframeClassificationSummaryAccuracy) UnmarshalJSON(data []byte) erro case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OverallAccuracy", err) } f := Float64(value) s.OverallAccuracy = f diff --git a/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go b/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go index 728d15a666..17dcc6fe2e 100644 --- a/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go +++ b/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeClassificationSummaryMulticlassConfusionMatrix type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L120-L123 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L120-L123 type DataframeClassificationSummaryMulticlassConfusionMatrix struct { ConfusionMatrix []ConfusionMatrixItem `json:"confusion_matrix"` OtherActualClassCount int `json:"other_actual_class_count"` @@ -53,7 +54,7 @@ func (s *DataframeClassificationSummaryMulticlassConfusionMatrix) UnmarshalJSON( case "confusion_matrix": if err := dec.Decode(&s.ConfusionMatrix); err != nil { - return err + return fmt.Errorf("%s | %w", "ConfusionMatrix", err) } case "other_actual_class_count": @@ -64,7 +65,7 @@ func (s *DataframeClassificationSummaryMulticlassConfusionMatrix) UnmarshalJSON( case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OtherActualClassCount", err) } s.OtherActualClassCount = value case float64: diff --git a/typedapi/types/dataframeclassificationsummaryprecision.go b/typedapi/types/dataframeclassificationsummaryprecision.go index 4949112060..e90ccef197 100644 --- a/typedapi/types/dataframeclassificationsummaryprecision.go +++ b/typedapi/types/dataframeclassificationsummaryprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeClassificationSummaryPrecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L101-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L101-L104 type DataframeClassificationSummaryPrecision struct { AvgPrecision Float64 `json:"avg_precision"` Classes []DataframeEvaluationClass `json:"classes"` @@ -58,7 +59,7 @@ func (s *DataframeClassificationSummaryPrecision) UnmarshalJSON(data []byte) err case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvgPrecision", err) } f := Float64(value) s.AvgPrecision = f @@ -69,7 +70,7 @@ func (s *DataframeClassificationSummaryPrecision) UnmarshalJSON(data []byte) err case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } } diff --git a/typedapi/types/dataframeclassificationsummaryrecall.go b/typedapi/types/dataframeclassificationsummaryrecall.go index cbae56faba..a0a895b02b 100644 --- a/typedapi/types/dataframeclassificationsummaryrecall.go +++ b/typedapi/types/dataframeclassificationsummaryrecall.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeClassificationSummaryRecall type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L106-L109 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L106-L109 type DataframeClassificationSummaryRecall struct { AvgRecall Float64 `json:"avg_recall"` Classes []DataframeEvaluationClass `json:"classes"` @@ -58,7 +59,7 @@ func (s *DataframeClassificationSummaryRecall) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvgRecall", err) } f := Float64(value) s.AvgRecall = f @@ -69,7 +70,7 @@ func (s *DataframeClassificationSummaryRecall) UnmarshalJSON(data []byte) error case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } } diff --git a/typedapi/types/dataframeevaluationclass.go b/typedapi/types/dataframeevaluationclass.go index b62a380528..72e073164f 100644 --- a/typedapi/types/dataframeevaluationclass.go +++ b/typedapi/types/dataframeevaluationclass.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationClass type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L116-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L116-L118 type DataframeEvaluationClass struct { ClassName string `json:"class_name"` Value Float64 `json:"value"` @@ -53,7 +54,7 @@ func (s *DataframeEvaluationClass) UnmarshalJSON(data []byte) error { case "class_name": if err := dec.Decode(&s.ClassName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } case "value": @@ -63,7 +64,7 @@ func (s *DataframeEvaluationClass) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := Float64(value) s.Value = f diff --git a/typedapi/types/dataframeevaluationclassification.go b/typedapi/types/dataframeevaluationclassification.go index b52779a773..aa92d06389 100644 --- a/typedapi/types/dataframeevaluationclassification.go +++ b/typedapi/types/dataframeevaluationclassification.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeEvaluationClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L35-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L35-L44 type DataframeEvaluationClassification struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has @@ -63,22 +64,22 @@ func (s *DataframeEvaluationClassification) UnmarshalJSON(data []byte) error { case "actual_field": if err := dec.Decode(&s.ActualField); err != nil { - return err + return fmt.Errorf("%s | %w", "ActualField", err) } case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } case "predicted_field": if err := dec.Decode(&s.PredictedField); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedField", err) } case "top_classes_field": if err := dec.Decode(&s.TopClassesField); err != nil { - return err + return fmt.Errorf("%s | %w", "TopClassesField", err) } } diff --git a/typedapi/types/dataframeevaluationclassificationmetrics.go b/typedapi/types/dataframeevaluationclassificationmetrics.go index 7c1e3f66a9..e31c00b55c 100644 --- a/typedapi/types/dataframeevaluationclassificationmetrics.go +++ b/typedapi/types/dataframeevaluationclassificationmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationClassificationMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L73-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L73-L78 type DataframeEvaluationClassificationMetrics struct { // Accuracy Accuracy of predictions (per-class and overall). Accuracy map[string]json.RawMessage `json:"accuracy,omitempty"` diff --git a/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go b/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go index a252b71020..1bceb7bfeb 100644 --- a/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go +++ b/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationClassificationMetricsAucRoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L85-L90 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L85-L90 type DataframeEvaluationClassificationMetricsAucRoc struct { // ClassName Name of the only class that is treated as positive during AUC ROC // calculation. Other classes are treated as negative ("one-vs-all" strategy). @@ -59,7 +60,7 @@ func (s *DataframeEvaluationClassificationMetricsAucRoc) UnmarshalJSON(data []by case "class_name": if err := dec.Decode(&s.ClassName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } case "include_curve": @@ -69,7 +70,7 @@ func (s *DataframeEvaluationClassificationMetricsAucRoc) UnmarshalJSON(data []by case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeCurve", err) } s.IncludeCurve = &value case bool: diff --git a/typedapi/types/dataframeevaluationcontainer.go b/typedapi/types/dataframeevaluationcontainer.go index bab4b48cca..bea1bd85b0 100644 --- a/typedapi/types/dataframeevaluationcontainer.go +++ b/typedapi/types/dataframeevaluationcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeEvaluationContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L25-L33 type DataframeEvaluationContainer struct { // Classification Classification evaluation evaluates the results of a classification analysis // which outputs a prediction that identifies to which of the classes each diff --git a/typedapi/types/dataframeevaluationmetrics.go b/typedapi/types/dataframeevaluationmetrics.go index f7b08b8e7d..64497f1b4e 100644 --- a/typedapi/types/dataframeevaluationmetrics.go +++ b/typedapi/types/dataframeevaluationmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L64-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L64-L71 type DataframeEvaluationMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class diff --git a/typedapi/types/dataframeevaluationoutlierdetection.go b/typedapi/types/dataframeevaluationoutlierdetection.go index 39b70c3216..346b0a5447 100644 --- a/typedapi/types/dataframeevaluationoutlierdetection.go +++ b/typedapi/types/dataframeevaluationoutlierdetection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeEvaluationOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L46-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L46-L53 type DataframeEvaluationOutlierDetection struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has @@ -60,17 +61,17 @@ func (s *DataframeEvaluationOutlierDetection) UnmarshalJSON(data []byte) error { case "actual_field": if err := dec.Decode(&s.ActualField); err != nil { - return err + return fmt.Errorf("%s | %w", "ActualField", err) } case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } case "predicted_probability_field": if err := dec.Decode(&s.PredictedProbabilityField); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedProbabilityField", err) } } diff --git a/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go b/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go index 7520614ff1..f025bbe838 100644 --- a/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go +++ b/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationOutlierDetectionMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L80-L83 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L80-L83 type DataframeEvaluationOutlierDetectionMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class diff --git a/typedapi/types/dataframeevaluationregression.go b/typedapi/types/dataframeevaluationregression.go index c9fdec467e..ffb5c53d30 100644 --- a/typedapi/types/dataframeevaluationregression.go +++ b/typedapi/types/dataframeevaluationregression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataframeEvaluationRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L55-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L55-L62 type DataframeEvaluationRegression struct { // ActualField The field of the index which contains the ground truth. The data type of this // field must be numerical. @@ -60,17 +61,17 @@ func (s *DataframeEvaluationRegression) UnmarshalJSON(data []byte) error { case "actual_field": if err := dec.Decode(&s.ActualField); err != nil { - return err + return fmt.Errorf("%s | %w", "ActualField", err) } case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } case "predicted_field": if err := dec.Decode(&s.PredictedField); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedField", err) } } diff --git a/typedapi/types/dataframeevaluationregressionmetrics.go b/typedapi/types/dataframeevaluationregressionmetrics.go index cc5178e7c5..48b391c84f 100644 --- a/typedapi/types/dataframeevaluationregressionmetrics.go +++ b/typedapi/types/dataframeevaluationregressionmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationRegressionMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L92-L110 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L92-L110 type DataframeEvaluationRegressionMetrics struct { // Huber Pseudo Huber loss function. 
Huber *DataframeEvaluationRegressionMetricsHuber `json:"huber,omitempty"` diff --git a/typedapi/types/dataframeevaluationregressionmetricshuber.go b/typedapi/types/dataframeevaluationregressionmetricshuber.go index f0986b5acc..d159f7dd96 100644 --- a/typedapi/types/dataframeevaluationregressionmetricshuber.go +++ b/typedapi/types/dataframeevaluationregressionmetricshuber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationRegressionMetricsHuber type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L117-L120 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L117-L120 type DataframeEvaluationRegressionMetricsHuber struct { // Delta Approximates 1/2 (prediction - actual)2 for values much less than delta and // approximates a straight line with slope delta for values much larger than @@ -60,7 +61,7 @@ func (s *DataframeEvaluationRegressionMetricsHuber) UnmarshalJSON(data []byte) e case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Delta", err) } f := Float64(value) s.Delta = &f diff --git a/typedapi/types/dataframeevaluationregressionmetricsmsle.go b/typedapi/types/dataframeevaluationregressionmetricsmsle.go index f28044570f..a6f9ddb709 100644 --- a/typedapi/types/dataframeevaluationregressionmetricsmsle.go +++ b/typedapi/types/dataframeevaluationregressionmetricsmsle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationRegressionMetricsMsle type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeEvaluation.ts#L112-L115 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeEvaluation.ts#L112-L115 type DataframeEvaluationRegressionMetricsMsle struct { // Offset Defines the transition point at which you switch from minimizing quadratic // error to minimizing quadratic log error. Defaults to 1. @@ -59,7 +60,7 @@ func (s *DataframeEvaluationRegressionMetricsMsle) UnmarshalJSON(data []byte) er case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } f := Float64(value) s.Offset = &f diff --git a/typedapi/types/dataframeevaluationsummaryaucroc.go b/typedapi/types/dataframeevaluationsummaryaucroc.go index 5cf5f9cefc..060e24a6d2 100644 --- a/typedapi/types/dataframeevaluationsummaryaucroc.go +++ b/typedapi/types/dataframeevaluationsummaryaucroc.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationSummaryAucRoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L91-L93 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L91-L93 type DataframeEvaluationSummaryAucRoc struct { Curve []DataframeEvaluationSummaryAucRocCurveItem `json:"curve,omitempty"` Value Float64 `json:"value"` @@ -53,7 +54,7 @@ func (s *DataframeEvaluationSummaryAucRoc) UnmarshalJSON(data []byte) error { case "curve": if err := dec.Decode(&s.Curve); err != nil { - return err + return fmt.Errorf("%s | %w", "Curve", err) } case "value": @@ -63,7 +64,7 @@ func (s *DataframeEvaluationSummaryAucRoc) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := Float64(value) s.Value = f diff --git a/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go b/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go index afe98e8776..29c6258301 100644 --- a/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go +++ b/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationSummaryAucRocCurveItem type. 
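The Value case above shows the other pattern that repeats through these files: scalar fields accept either a native JSON value or a quoted string, so the raw value is decoded into interface{} and switched on its dynamic type. A simplified sketch of that branch structure (the generated code does this on a streaming json.Decoder; plain json.Unmarshal keeps the sketch short):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeFloat mirrors the "number or quoted number" handling.
func decodeFloat(raw json.RawMessage, field string) (float64, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, fmt.Errorf("%s | %w", field, err)
	}
	switch v := tmp.(type) {
	case string:
		f, err := strconv.ParseFloat(v, 64)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", field, err)
		}
		return f, nil
	case float64: // encoding/json represents untyped JSON numbers as float64
		return v, nil
	default:
		return 0, fmt.Errorf("%s | unexpected JSON type %T", field, tmp)
	}
}

func main() {
	a, _ := decodeFloat(json.RawMessage(`0.92`), "Value")
	b, _ := decodeFloat(json.RawMessage(`"0.92"`), "Value")
	fmt.Println(a, b) // 0.92 0.92
}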
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L95-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L95-L99 type DataframeEvaluationSummaryAucRocCurveItem struct { Fpr Float64 `json:"fpr"` Threshold Float64 `json:"threshold"` @@ -59,7 +60,7 @@ func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) e case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Fpr", err) } f := Float64(value) s.Fpr = f @@ -75,7 +76,7 @@ func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) e case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Threshold", err) } f := Float64(value) s.Threshold = f @@ -91,7 +92,7 @@ func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) e case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Tpr", err) } f := Float64(value) s.Tpr = f diff --git a/typedapi/types/dataframeevaluationvalue.go b/typedapi/types/dataframeevaluationvalue.go index 381987f8a3..feb27e679a 100644 --- a/typedapi/types/dataframeevaluationvalue.go +++ b/typedapi/types/dataframeevaluationvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframeEvaluationValue type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L87-L89 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L87-L89 type DataframeEvaluationValue struct { Value Float64 `json:"value"` } @@ -57,7 +58,7 @@ func (s *DataframeEvaluationValue) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := Float64(value) s.Value = f diff --git a/typedapi/types/dataframeoutlierdetectionsummary.go b/typedapi/types/dataframeoutlierdetectionsummary.go index 07ddab4423..95030f8061 100644 --- a/typedapi/types/dataframeoutlierdetectionsummary.go +++ b/typedapi/types/dataframeoutlierdetectionsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeOutlierDetectionSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L24-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L24-L42 type DataframeOutlierDetectionSummary struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. diff --git a/typedapi/types/dataframepreviewconfig.go b/typedapi/types/dataframepreviewconfig.go index 49c3088632..ee8aa1db4d 100644 --- a/typedapi/types/dataframepreviewconfig.go +++ b/typedapi/types/dataframepreviewconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataframePreviewConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/preview_data_frame_analytics/types.ts#L27-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/preview_data_frame_analytics/types.ts#L27-L33 type DataframePreviewConfig struct { Analysis DataframeAnalysisContainer `json:"analysis"` AnalyzedFields *DataframeAnalysisAnalyzedFields `json:"analyzed_fields,omitempty"` @@ -56,12 +57,12 @@ func (s *DataframePreviewConfig) UnmarshalJSON(data []byte) error { case "analysis": if err := dec.Decode(&s.Analysis); err != nil { - return err + return fmt.Errorf("%s | %w", "Analysis", err) } case "analyzed_fields": if err := dec.Decode(&s.AnalyzedFields); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyzedFields", err) } case "max_num_threads": @@ -72,7 +73,7 @@ func (s *DataframePreviewConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNumThreads", err) } s.MaxNumThreads = &value case float64: @@ -83,7 +84,7 @@ func (s *DataframePreviewConfig) UnmarshalJSON(data []byte) error { case "model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,7 +95,7 @@ func (s *DataframePreviewConfig) UnmarshalJSON(data []byte) error { case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } } diff --git a/typedapi/types/dataframeregressionsummary.go b/typedapi/types/dataframeregressionsummary.go index 5256a10c3a..205c18adfb 100644 --- a/typedapi/types/dataframeregressionsummary.go +++ b/typedapi/types/dataframeregressionsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataframeRegressionSummary type. 
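Plain string fields such as model_memory_limit take a third route: the value is captured as json.RawMessage and unquoted with strconv.Unquote, since the raw bytes still carry the surrounding JSON quotes. What the generated code does when Unquote fails is outside the hunks shown here; keeping the raw text, as in this sketch, is only one reasonable choice:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeString sketches the RawMessage + Unquote step for string-valued fields.
func decodeString(raw json.RawMessage) string {
	o := string(raw)
	if unq, err := strconv.Unquote(o); err == nil {
		return unq
	}
	return o // not a quoted JSON string; keep it as-is (sketch-only fallback)
}

func main() {
	fmt.Println(decodeString(json.RawMessage(`"50mb"`))) // 50mb
}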
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/evaluate_data_frame/types.ts#L68-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/evaluate_data_frame/types.ts#L68-L85 type DataframeRegressionSummary struct { // Huber Pseudo Huber loss function. Huber *DataframeEvaluationValue `json:"huber,omitempty"` diff --git a/typedapi/types/datapathstats.go b/typedapi/types/datapathstats.go index 41a1b1a971..ec3a19ce10 100644 --- a/typedapi/types/datapathstats.go +++ b/typedapi/types/datapathstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataPathStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L550-L594 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L550-L594 type DataPathStats struct { // Available Total amount of disk space available to this Java virtual machine on this // file store. @@ -79,7 +80,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "available": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvailableInBytes", err) } s.AvailableInBytes = &value case float64: @@ -106,7 +107,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "disk_queue": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskQueue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "disk_read_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskReadSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -134,7 +135,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiskReadSizeInBytes", err) } s.DiskReadSizeInBytes = &value case float64: @@ -149,7 +150,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiskReads", err) } s.DiskReads = &value case float64: @@ -160,7 +161,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "disk_write_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskWriteSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -176,7 +177,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if 
err != nil { - return err + return fmt.Errorf("%s | %w", "DiskWriteSizeInBytes", err) } s.DiskWriteSizeInBytes = &value case float64: @@ -191,7 +192,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiskWrites", err) } s.DiskWrites = &value case float64: @@ -202,7 +203,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "free": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Free", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -218,7 +219,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeInBytes", err) } s.FreeInBytes = &value case float64: @@ -229,7 +230,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "mount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Mount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -241,7 +242,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -253,7 +254,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -269,7 +270,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = &value case float64: @@ -280,7 +281,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/datastream.go b/typedapi/types/datastream.go index bba211536b..c535af94bb 100644 --- a/typedapi/types/datastream.go +++ b/typedapi/types/datastream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // DataStream type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStream.ts#L39-L112 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStream.ts#L39-L112 type DataStream struct { // AllowCustomRouting If `true`, the data stream allows custom routing on write request. 
AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` @@ -111,7 +112,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowCustomRouting", err) } s.AllowCustomRouting = &value case bool: @@ -126,7 +127,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Generation", err) } s.Generation = value case float64: @@ -141,7 +142,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Hidden", err) } s.Hidden = value case bool: @@ -150,32 +151,32 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case "ilm_policy": if err := dec.Decode(&s.IlmPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "IlmPolicy", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "lifecycle": if err := dec.Decode(&s.Lifecycle); err != nil { - return err + return fmt.Errorf("%s | %w", "Lifecycle", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "next_generation_managed_by": if err := dec.Decode(&s.NextGenerationManagedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "NextGenerationManagedBy", err) } case "prefer_ilm": @@ -185,7 +186,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PreferIlm", err) } s.PreferIlm = value case bool: @@ -199,7 +200,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Replicated", err) } s.Replicated = &value case bool: @@ -208,7 +209,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "system": @@ -218,7 +219,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "System", err) } s.System = &value case bool: @@ -227,12 +228,12 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "timestamp_field": if err := dec.Decode(&s.TimestampField); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampField", err) } } diff --git a/typedapi/types/datastreamindex.go b/typedapi/types/datastreamindex.go index bd0e532fdd..d2ea334d89 100644 --- a/typedapi/types/datastreamindex.go +++ b/typedapi/types/datastreamindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DataStreamIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStream.ts#L121-L142 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStream.ts#L121-L142 type DataStreamIndex struct { // IlmPolicy Name of the current ILM lifecycle policy configured for this backing index. IlmPolicy *string `json:"ilm_policy,omitempty"` @@ -64,22 +65,22 @@ func (s *DataStreamIndex) UnmarshalJSON(data []byte) error { case "ilm_policy": if err := dec.Decode(&s.IlmPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "IlmPolicy", err) } case "index_name": if err := dec.Decode(&s.IndexName); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexName", err) } case "index_uuid": if err := dec.Decode(&s.IndexUuid); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexUuid", err) } case "managed_by": if err := dec.Decode(&s.ManagedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "ManagedBy", err) } case "prefer_ilm": @@ -89,7 +90,7 @@ func (s *DataStreamIndex) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PreferIlm", err) } s.PreferIlm = value case bool: diff --git a/typedapi/types/datastreamlifecycle.go b/typedapi/types/datastreamlifecycle.go index 59146700b7..3f01cea17d 100644 --- a/typedapi/types/datastreamlifecycle.go +++ b/typedapi/types/datastreamlifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataStreamLifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStreamLifecycle.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStreamLifecycle.ts#L25-L31 type DataStreamLifecycle struct { DataRetention Duration `json:"data_retention,omitempty"` Downsampling *DataStreamLifecycleDownsampling `json:"downsampling,omitempty"` @@ -52,12 +53,12 @@ func (s *DataStreamLifecycle) UnmarshalJSON(data []byte) error { case "data_retention": if err := dec.Decode(&s.DataRetention); err != nil { - return err + return fmt.Errorf("%s | %w", "DataRetention", err) } case "downsampling": if err := dec.Decode(&s.Downsampling); err != nil { - return err + return fmt.Errorf("%s | %w", "Downsampling", err) } } diff --git a/typedapi/types/datastreamlifecycledownsampling.go b/typedapi/types/datastreamlifecycledownsampling.go index a849d0b92a..c50fd68413 100644 --- a/typedapi/types/datastreamlifecycledownsampling.go +++ b/typedapi/types/datastreamlifecycledownsampling.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataStreamLifecycleDownsampling type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStreamLifecycleDownsampling.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStreamLifecycleDownsampling.ts#L22-L27 type DataStreamLifecycleDownsampling struct { // Rounds The list of downsampling rounds to execute as part of this downsampling // configuration diff --git a/typedapi/types/datastreamlifecycleexplain.go b/typedapi/types/datastreamlifecycleexplain.go index 1ae5a97ec8..d66a237b63 100644 --- a/typedapi/types/datastreamlifecycleexplain.go +++ b/typedapi/types/datastreamlifecycleexplain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataStreamLifecycleExplain type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L31-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L31-L41 type DataStreamLifecycleExplain struct { Error *string `json:"error,omitempty"` GenerationTime Duration `json:"generation_time,omitempty"` @@ -61,7 +62,7 @@ func (s *DataStreamLifecycleExplain) UnmarshalJSON(data []byte) error { case "error": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,22 +73,22 @@ func (s *DataStreamLifecycleExplain) UnmarshalJSON(data []byte) error { case "generation_time": if err := dec.Decode(&s.GenerationTime); err != nil { - return err + return fmt.Errorf("%s | %w", "GenerationTime", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "index_creation_date_millis": if err := dec.Decode(&s.IndexCreationDateMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCreationDateMillis", err) } case "lifecycle": if err := dec.Decode(&s.Lifecycle); err != nil { - return err + return fmt.Errorf("%s | %w", "Lifecycle", err) } case "managed_by_lifecycle": @@ -97,7 +98,7 @@ func (s *DataStreamLifecycleExplain) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ManagedByLifecycle", err) } s.ManagedByLifecycle = value case bool: @@ -106,17 +107,17 @@ func (s *DataStreamLifecycleExplain) UnmarshalJSON(data []byte) error { case "rollover_date_millis": if err := dec.Decode(&s.RolloverDateMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "RolloverDateMillis", 
err) } case "time_since_index_creation": if err := dec.Decode(&s.TimeSinceIndexCreation); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSinceIndexCreation", err) } case "time_since_rollover": if err := dec.Decode(&s.TimeSinceRollover); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSinceRollover", err) } } diff --git a/typedapi/types/datastreamlifecyclerolloverconditions.go b/typedapi/types/datastreamlifecyclerolloverconditions.go index 4f7504bdbd..146dbd7692 100644 --- a/typedapi/types/datastreamlifecyclerolloverconditions.go +++ b/typedapi/types/datastreamlifecyclerolloverconditions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataStreamLifecycleRolloverConditions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStreamLifecycle.ts#L57-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStreamLifecycle.ts#L57-L69 type DataStreamLifecycleRolloverConditions struct { MaxAge *string `json:"max_age,omitempty"` MaxDocs *int64 `json:"max_docs,omitempty"` @@ -62,7 +63,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case "max_age": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAge", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocs", err) } s.MaxDocs = &value case float64: @@ -93,7 +94,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPrimaryShardDocs", err) } s.MaxPrimaryShardDocs = &value case float64: @@ -103,17 +104,17 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case "max_primary_shard_size": if err := dec.Decode(&s.MaxPrimaryShardSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPrimaryShardSize", err) } case "max_size": if err := dec.Decode(&s.MaxSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSize", err) } case "min_age": if err := dec.Decode(&s.MinAge); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAge", err) } case "min_docs": @@ -123,7 +124,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocs", err) } s.MinDocs = &value case float64: @@ -138,7 +139,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinPrimaryShardDocs", err) } s.MinPrimaryShardDocs = &value case float64: @@ -148,12 +149,12 @@ func (s 
*DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error case "min_primary_shard_size": if err := dec.Decode(&s.MinPrimaryShardSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MinPrimaryShardSize", err) } case "min_size": if err := dec.Decode(&s.MinSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MinSize", err) } } diff --git a/typedapi/types/datastreamlifecyclewithrollover.go b/typedapi/types/datastreamlifecyclewithrollover.go index 5144f390e9..1f1bf3e403 100644 --- a/typedapi/types/datastreamlifecyclewithrollover.go +++ b/typedapi/types/datastreamlifecyclewithrollover.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataStreamLifecycleWithRollover type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStreamLifecycle.ts#L33-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStreamLifecycle.ts#L33-L55 type DataStreamLifecycleWithRollover struct { // DataRetention If defined, every document added to this data stream will be stored at least // for this time frame. @@ -64,17 +65,17 @@ func (s *DataStreamLifecycleWithRollover) UnmarshalJSON(data []byte) error { case "data_retention": if err := dec.Decode(&s.DataRetention); err != nil { - return err + return fmt.Errorf("%s | %w", "DataRetention", err) } case "downsampling": if err := dec.Decode(&s.Downsampling); err != nil { - return err + return fmt.Errorf("%s | %w", "Downsampling", err) } case "rollover": if err := dec.Decode(&s.Rollover); err != nil { - return err + return fmt.Errorf("%s | %w", "Rollover", err) } } diff --git a/typedapi/types/datastreamnames.go b/typedapi/types/datastreamnames.go index b96d2c2d89..b05eafee96 100644 --- a/typedapi/types/datastreamnames.go +++ b/typedapi/types/datastreamnames.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DataStreamNames type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L94-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L94-L94 type DataStreamNames []string diff --git a/typedapi/types/datastreams.go b/typedapi/types/datastreams.go index 0fd205ac56..2bb04d1470 100644 --- a/typedapi/types/datastreams.go +++ b/typedapi/types/datastreams.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
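All of these methods share the same token-driven skeleton: the payload is walked with json.Decoder.Token, each key selects a case, and every failure is wrapped with the Go field name. The surrounding loop is not part of the hunks shown here, so the following is an approximation of the approach rather than the library's exact code; the field names are modeled on the DataStream flags decoded above:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// streamFlags is a stand-in struct for the sketch, not a type from the client.
type streamFlags struct {
	Hidden     bool
	Replicated *bool
}

func (s *streamFlags) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				return nil
			}
			return err
		}
		switch t {
		case "hidden":
			// Accept both a native bool and a quoted "true"/"false",
			// matching the ParseBool branches in the generated decoders.
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return fmt.Errorf("%s | %w", "Hidden", err)
			}
			switch v := tmp.(type) {
			case string:
				b, err := strconv.ParseBool(v)
				if err != nil {
					return fmt.Errorf("%s | %w", "Hidden", err)
				}
				s.Hidden = b
			case bool:
				s.Hidden = v
			}
		case "replicated":
			if err := dec.Decode(&s.Replicated); err != nil {
				return fmt.Errorf("%s | %w", "Replicated", err)
			}
		}
	}
}

func main() {
	var f streamFlags
	if err := json.Unmarshal([]byte(`{"hidden":"true","replicated":false}`), &f); err != nil {
		panic(err)
	}
	fmt.Println(f.Hidden, *f.Replicated) // true false
}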
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataStreams type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L81-L84 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L81-L84 type DataStreams struct { Available bool `json:"available"` DataStreams int64 `json:"data_streams"` @@ -60,7 +61,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -74,7 +75,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataStreams", err) } s.DataStreams = value case float64: @@ -89,7 +90,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -103,7 +104,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesCount", err) } s.IndicesCount = value case float64: diff --git a/typedapi/types/datastreamsstatsitem.go b/typedapi/types/datastreamsstatsitem.go index 1d98327fe0..8443c67c42 100644 --- a/typedapi/types/datastreamsstatsitem.go +++ b/typedapi/types/datastreamsstatsitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataStreamsStatsItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L45-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L45-L65 type DataStreamsStatsItem struct { // BackingIndices Current number of backing indices for the data stream. BackingIndices int `json:"backing_indices"` @@ -48,7 +49,7 @@ type DataStreamsStatsItem struct { // This parameter is only returned if the `human` query parameter is `true`. StoreSize ByteSize `json:"store_size,omitempty"` // StoreSizeBytes Total size, in bytes, of all shards for the data stream’s backing indices. 
- StoreSizeBytes int `json:"store_size_bytes"` + StoreSizeBytes int64 `json:"store_size_bytes"` } func (s *DataStreamsStatsItem) UnmarshalJSON(data []byte) error { @@ -74,7 +75,7 @@ func (s *DataStreamsStatsItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BackingIndices", err) } s.BackingIndices = value case float64: @@ -84,32 +85,31 @@ func (s *DataStreamsStatsItem) UnmarshalJSON(data []byte) error { case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "maximum_timestamp": if err := dec.Decode(&s.MaximumTimestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaximumTimestamp", err) } case "store_size": if err := dec.Decode(&s.StoreSize); err != nil { - return err + return fmt.Errorf("%s | %w", "StoreSize", err) } case "store_size_bytes": - var tmp interface{} dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StoreSizeBytes", err) } s.StoreSizeBytes = value case float64: - f := int(v) + f := int64(v) s.StoreSizeBytes = f } diff --git a/typedapi/types/datastreamtimestamp.go b/typedapi/types/datastreamtimestamp.go index a45464bd9e..5a7bd51985 100644 --- a/typedapi/types/datastreamtimestamp.go +++ b/typedapi/types/datastreamtimestamp.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataStreamTimestamp type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/TypeMapping.ts#L58-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/TypeMapping.ts#L59-L61 type DataStreamTimestamp struct { Enabled bool `json:"enabled"` } @@ -57,7 +58,7 @@ func (s *DataStreamTimestamp) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/datastreamtimestampfield.go b/typedapi/types/datastreamtimestampfield.go index bd50481a5f..f4177ac828 100644 --- a/typedapi/types/datastreamtimestampfield.go +++ b/typedapi/types/datastreamtimestampfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataStreamTimestampField type. 
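The DataStreamsStatsItem change is more than error wrapping: store_size_bytes moves from int to int64 and its string branch from strconv.Atoi to strconv.ParseInt(v, 10, 64). Atoi parses at the platform's int width, so on 32-bit builds a store size above roughly 2 GiB would previously overflow the field or fail to parse. A small illustration of the difference (the 32-bit failure is forced explicitly so the output does not depend on the platform):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	raw := "3221225472" // 3 GiB expressed in bytes, larger than 2^31-1

	// Equivalent to what Atoi does on a 32-bit platform: out of range.
	if _, err := strconv.ParseInt(raw, 10, 32); err != nil {
		fmt.Println("32-bit parse:", err)
	}

	// The regenerated field is int64 and parsed with an explicit 64-bit size.
	v, err := strconv.ParseInt(raw, 10, 64)
	fmt.Println("64-bit parse:", v, err) // 3221225472 <nil>
}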
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStream.ts#L114-L119 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStream.ts#L114-L119 type DataStreamTimestampField struct { // Name Name of the timestamp field for the data stream, which must be `@timestamp`. // The `@timestamp` field must be included in every document indexed to the data @@ -54,7 +55,7 @@ func (s *DataStreamTimestampField) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/datastreamvisibility.go b/typedapi/types/datastreamvisibility.go index ddc2fbb70a..2cc6175244 100644 --- a/typedapi/types/datastreamvisibility.go +++ b/typedapi/types/datastreamvisibility.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataStreamVisibility type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStream.ts#L144-L146 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStream.ts#L144-L146 type DataStreamVisibility struct { Hidden *bool `json:"hidden,omitempty"` } @@ -57,7 +58,7 @@ func (s *DataStreamVisibility) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Hidden", err) } s.Hidden = &value case bool: diff --git a/typedapi/types/datastreamwithlifecycle.go b/typedapi/types/datastreamwithlifecycle.go index 4f27e55596..8f576256cb 100644 --- a/typedapi/types/datastreamwithlifecycle.go +++ b/typedapi/types/datastreamwithlifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DataStreamWithLifecycle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L27-L30 type DataStreamWithLifecycle struct { Lifecycle *DataStreamLifecycle `json:"lifecycle,omitempty"` Name string `json:"name"` @@ -52,12 +53,12 @@ func (s *DataStreamWithLifecycle) UnmarshalJSON(data []byte) error { case "lifecycle": if err := dec.Decode(&s.Lifecycle); err != nil { - return err + return fmt.Errorf("%s | %w", "Lifecycle", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/datatierphasestatistics.go b/typedapi/types/datatierphasestatistics.go index 4b2cdb4a15..90b7155311 100644 --- a/typedapi/types/datatierphasestatistics.go +++ b/typedapi/types/datatierphasestatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataTierPhaseStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L86-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L86-L97 type DataTierPhaseStatistics struct { DocCount int64 `json:"doc_count"` IndexCount int64 `json:"index_count"` @@ -66,7 +67,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -81,7 +82,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCount", err) } s.IndexCount = value case float64: @@ -96,7 +97,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodeCount", err) } s.NodeCount = value case float64: @@ -111,7 +112,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryShardCount", err) } s.PrimaryShardCount = value case float64: @@ -126,7 +127,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryShardSizeAvgBytes", err) } s.PrimaryShardSizeAvgBytes = value case float64: @@ -141,7 +142,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryShardSizeMadBytes", err) } s.PrimaryShardSizeMadBytes = value case float64: 
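The wrapping is emitted inline at every decode site rather than through a shared helper, presumably to keep each generated file free of cross-file dependencies. If one were writing this by hand, the same convention could live in a single function; the helper below is hypothetical and not part of the generated code, shown only to make the convention explicit:

package sketch

import "fmt"

// wrapField is a hypothetical helper capturing the "<Field> | <cause>" format
// that the generator currently inlines at each call site.
func wrapField(field string, err error) error {
	if err == nil {
		return nil
	}
	return fmt.Errorf("%s | %w", field, err)
}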
@@ -156,7 +157,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryShardSizeMedianBytes", err) } s.PrimaryShardSizeMedianBytes = value case float64: @@ -171,7 +172,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimarySizeBytes", err) } s.PrimarySizeBytes = value case float64: @@ -186,7 +187,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalShardCount", err) } s.TotalShardCount = value case float64: @@ -201,7 +202,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSizeBytes", err) } s.TotalSizeBytes = value case float64: diff --git a/typedapi/types/datatiers.go b/typedapi/types/datatiers.go index 93aafe7bcd..101f3855ee 100644 --- a/typedapi/types/datatiers.go +++ b/typedapi/types/datatiers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DataTiers type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L339-L349 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L339-L349 type DataTiers struct { Available bool `json:"available"` DataCold DataTierPhaseStatistics `json:"data_cold"` @@ -63,7 +64,7 @@ func (s *DataTiers) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -72,27 +73,27 @@ func (s *DataTiers) UnmarshalJSON(data []byte) error { case "data_cold": if err := dec.Decode(&s.DataCold); err != nil { - return err + return fmt.Errorf("%s | %w", "DataCold", err) } case "data_content": if err := dec.Decode(&s.DataContent); err != nil { - return err + return fmt.Errorf("%s | %w", "DataContent", err) } case "data_frozen": if err := dec.Decode(&s.DataFrozen); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrozen", err) } case "data_hot": if err := dec.Decode(&s.DataHot); err != nil { - return err + return fmt.Errorf("%s | %w", "DataHot", err) } case "data_warm": if err := dec.Decode(&s.DataWarm); err != nil { - return err + return fmt.Errorf("%s | %w", "DataWarm", err) } case "enabled": @@ -102,7 +103,7 @@ func (s *DataTiers) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/datedecayfunction.go b/typedapi/types/datedecayfunction.go index f39ad65ab5..16af4c09c3 100644 --- a/typedapi/types/datedecayfunction.go +++ b/typedapi/types/datedecayfunction.go @@ 
-16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,7 +29,7 @@ import ( // DateDecayFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L186-L188 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L186-L188 type DateDecayFunction struct { DateDecayFunction map[string]DecayPlacementDateMathDuration `json:"DateDecayFunction,omitempty"` // MultiValueMode Determines how the distance is calculated when a field used for computing the diff --git a/typedapi/types/datedistancefeaturequery.go b/typedapi/types/datedistancefeaturequery.go index 923bb746d3..e4ebec7192 100644 --- a/typedapi/types/datedistancefeaturequery.go +++ b/typedapi/types/datedistancefeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DateDistanceFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L67-L70 type DateDistanceFeatureQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -83,7 +84,7 @@ func (s *DateDistanceFeatureQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,23 +95,23 @@ func (s *DateDistanceFeatureQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/datehistogramaggregate.go b/typedapi/types/datehistogramaggregate.go index 9980cb55a9..ca70a0178d 100644 --- a/typedapi/types/datehistogramaggregate.go +++ b/typedapi/types/datehistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DateHistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L348-L349 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L348-L349 type DateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *DateHistogramAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]DateHistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []DateHistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/datehistogramaggregation.go b/typedapi/types/datehistogramaggregation.go index c11ef0bdcb..83c57c0dd3 100644 --- a/typedapi/types/datehistogramaggregation.go +++ b/typedapi/types/datehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // DateHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L189-L247 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L189-L247 type DateHistogramAggregation struct { // CalendarInterval Calendar-aware interval. 
// Can be specified using the unit name, such as `month`, or as a single unit @@ -94,28 +95,28 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case "calendar_interval": if err := dec.Decode(&s.CalendarInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarInterval", err) } case "extended_bounds": if err := dec.Decode(&s.ExtendedBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "ExtendedBounds", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "fixed_interval": if err := dec.Decode(&s.FixedInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "FixedInterval", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,12 +127,12 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case "hard_bounds": if err := dec.Decode(&s.HardBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "HardBounds", err) } case "interval": if err := dec.Decode(&s.Interval); err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } case "keyed": @@ -141,7 +142,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -150,7 +151,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -161,7 +162,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -171,13 +172,13 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,7 +189,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case "offset": if err := dec.Decode(&s.Offset); err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } case "order": @@ -201,13 +202,13 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o case '[': o := make([]map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o } @@ -217,13 +218,13 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -232,7 +233,7 @@ 
func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -241,7 +242,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -249,7 +250,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -258,7 +259,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/types/datehistogrambucket.go b/typedapi/types/datehistogrambucket.go index 00c2bddf1e..483f53db9b 100644 --- a/typedapi/types/datehistogrambucket.go +++ b/typedapi/types/datehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // DateHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L351-L354 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L351-L354 type DateHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -62,7 +62,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -72,13 +72,13 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -101,490 +101,490 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := 
NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := 
NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -594,7 +594,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/datehistogramgrouping.go b/typedapi/types/datehistogramgrouping.go index da661e09bf..92cd413b06 100644 --- a/typedapi/types/datehistogramgrouping.go +++ b/typedapi/types/datehistogramgrouping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DateHistogramGrouping type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/_types/Groupings.ts#L42-L73 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/_types/Groupings.ts#L42-L73 type DateHistogramGrouping struct { // CalendarInterval The interval of time buckets to be generated when rolling up. 
CalendarInterval Duration `json:"calendar_interval,omitempty"` @@ -72,28 +73,28 @@ func (s *DateHistogramGrouping) UnmarshalJSON(data []byte) error { case "calendar_interval": if err := dec.Decode(&s.CalendarInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarInterval", err) } case "delay": if err := dec.Decode(&s.Delay); err != nil { - return err + return fmt.Errorf("%s | %w", "Delay", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "fixed_interval": if err := dec.Decode(&s.FixedInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "FixedInterval", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,12 +105,12 @@ func (s *DateHistogramGrouping) UnmarshalJSON(data []byte) error { case "interval": if err := dec.Decode(&s.Interval); err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/types/dateindexnameprocessor.go b/typedapi/types/dateindexnameprocessor.go index 29eaa16508..3727a1d8af 100644 --- a/typedapi/types/dateindexnameprocessor.go +++ b/typedapi/types/dateindexnameprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DateIndexNameProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L502-L540 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L502-L540 type DateIndexNameProcessor struct { // DateFormats An array of the expected date formats for parsing dates / timestamps in the // document being preprocessed. 
@@ -89,13 +90,13 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "date_formats": if err := dec.Decode(&s.DateFormats); err != nil { - return err + return fmt.Errorf("%s | %w", "DateFormats", err) } case "date_rounding": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DateRounding", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,13 +119,13 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -140,7 +141,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -150,7 +151,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "index_name_format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexNameFormat", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -162,7 +163,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "index_name_prefix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexNamePrefix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -174,7 +175,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Locale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -185,13 +186,13 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -203,7 +204,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { case "timezone": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timezone", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/datenanosproperty.go b/typedapi/types/datenanosproperty.go index 4d782c9194..fc240760ab 100644 --- a/typedapi/types/datenanosproperty.go +++ b/typedapi/types/datenanosproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DateNanosProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L73-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L73-L81 type DateNanosProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -75,7 +76,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -90,13 +91,13 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -107,7 +108,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -116,7 +117,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -429,7 +430,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -446,7 +447,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -461,7 +462,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -475,7 +476,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -487,12 +488,12 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } case "precision_step": @@ -503,7 +504,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrecisionStep", err) } 
s.PrecisionStep = &value case float64: @@ -821,7 +822,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -837,7 +838,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -846,7 +847,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/dateprocessor.go b/typedapi/types/dateprocessor.go index f17d012ee5..36c236c7e6 100644 --- a/typedapi/types/dateprocessor.go +++ b/typedapi/types/dateprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DateProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L542-L569 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L542-L569 type DateProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -79,7 +80,7 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,18 +91,18 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "formats": if err := dec.Decode(&s.Formats); err != nil { - return err + return fmt.Errorf("%s | %w", "Formats", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -127,7 +128,7 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { case "locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Locale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,13 +139,13 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -155,13 +156,13 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } case "timezone": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timezone", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/dateproperty.go b/typedapi/types/dateproperty.go index 36e9f845e2..798ffcc689 100644 --- a/typedapi/types/dateproperty.go +++ b/typedapi/types/dateproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DateProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L61-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L61-L71 type DateProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -77,7 +78,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -92,13 +93,13 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -109,7 +110,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -118,12 +119,12 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fielddata": if err := dec.Decode(&s.Fielddata); err != nil { - return err + return fmt.Errorf("%s | %w", "Fielddata", err) } case "fields": @@ -436,7 +437,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -453,7 +454,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -468,7 +469,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -482,7 +483,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -492,7 +493,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Locale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -506,12 +507,12 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } case "precision_step": @@ -522,7 +523,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrecisionStep", err) } s.PrecisionStep = 
&value case float64: @@ -840,7 +841,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -856,7 +857,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -865,7 +866,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/daterangeaggregate.go b/typedapi/types/daterangeaggregate.go index 63b424f621..902e207764 100644 --- a/typedapi/types/daterangeaggregate.go +++ b/typedapi/types/daterangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DateRangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L543-L548 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L543-L548 type DateRangeAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *DateRangeAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]RangeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []RangeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/daterangeaggregation.go b/typedapi/types/daterangeaggregation.go index 3ae43d1e88..bafaa8810e 100644 --- a/typedapi/types/daterangeaggregation.go +++ b/typedapi/types/daterangeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DateRangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L268-L294 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L268-L294 type DateRangeAggregation struct { // Field The date field whose values are use to build ranges. 
Field *string `json:"field,omitempty"` @@ -67,13 +68,13 @@ func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,7 +90,7 @@ func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -98,18 +99,18 @@ func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,12 +121,12 @@ func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { case "ranges": if err := dec.Decode(&s.Ranges); err != nil { - return err + return fmt.Errorf("%s | %w", "Ranges", err) } case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/types/daterangeexpression.go b/typedapi/types/daterangeexpression.go index da319a1a71..b8c3979e65 100644 --- a/typedapi/types/daterangeexpression.go +++ b/typedapi/types/daterangeexpression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DateRangeExpression type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L305-L318 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L305-L318 type DateRangeExpression struct { // From Start of the range (inclusive). From FieldDateMath `json:"from,omitempty"` @@ -57,13 +58,13 @@ func (s *DateRangeExpression) UnmarshalJSON(data []byte) error { case "from": if err := dec.Decode(&s.From); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *DateRangeExpression) UnmarshalJSON(data []byte) error { case "to": if err := dec.Decode(&s.To); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } } diff --git a/typedapi/types/daterangeproperty.go b/typedapi/types/daterangeproperty.go index 3dfdd4e927..14edda6500 100644 --- a/typedapi/types/daterangeproperty.go +++ b/typedapi/types/daterangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DateRangeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/range.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/range.ts#L29-L32 type DateRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -73,7 +74,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -89,7 +90,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -102,13 +103,13 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -119,7 +120,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -128,7 +129,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -441,7 +442,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -458,7 +459,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -473,7 +474,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -485,7 +486,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -798,7 +799,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -814,7 +815,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case 
string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -823,7 +824,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/daterangequery.go b/typedapi/types/daterangequery.go index a1d6ac42ff..31d162846e 100644 --- a/typedapi/types/daterangequery.go +++ b/typedapi/types/daterangequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DateRangeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L116-L143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L116-L143 type DateRangeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -82,7 +83,7 @@ func (s *DateRangeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -93,38 +94,38 @@ func (s *DateRangeQuery) UnmarshalJSON(data []byte) error { case "format": if err := dec.Decode(&s.Format); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } case "from": if err := dec.Decode(&s.From); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } case "gt": if err := dec.Decode(&s.Gt); err != nil { - return err + return fmt.Errorf("%s | %w", "Gt", err) } case "gte": if err := dec.Decode(&s.Gte); err != nil { - return err + return fmt.Errorf("%s | %w", "Gte", err) } case "lt": if err := dec.Decode(&s.Lt); err != nil { - return err + return fmt.Errorf("%s | %w", "Lt", err) } case "lte": if err := dec.Decode(&s.Lte); err != nil { - return err + return fmt.Errorf("%s | %w", "Lte", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,17 +136,17 @@ func (s *DateRangeQuery) UnmarshalJSON(data []byte) error { case "relation": if err := dec.Decode(&s.Relation); err != nil { - return err + return fmt.Errorf("%s | %w", "Relation", err) } case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } case "to": if err := dec.Decode(&s.To); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } } diff --git a/typedapi/types/datetime.go b/typedapi/types/datetime.go index 8e6535c5ad..f6048d9134 100644 --- a/typedapi/types/datetime.go +++ b/typedapi/types/datetime.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // int64 // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L22-L27 type DateTime interface{} diff --git a/typedapi/types/decayfunction.go b/typedapi/types/decayfunction.go index 4b4a0d57e3..d2e76b1b9b 100644 --- a/typedapi/types/decayfunction.go +++ b/typedapi/types/decayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ package types // NumericDecayFunction // GeoDecayFunction // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L194-L199 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L194-L199 type DecayFunction interface{} diff --git a/typedapi/types/decayplacementdatemathduration.go b/typedapi/types/decayplacementdatemathduration.go index d46ff68d27..84002c0506 100644 --- a/typedapi/types/decayplacementdatemathduration.go +++ b/typedapi/types/decayplacementdatemathduration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DecayPlacementDateMathDuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L153-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L153-L172 type DecayPlacementDateMathDuration struct { // Decay Defines how documents are scored at the distance given at scale. 
Decay *Float64 `json:"decay,omitempty"` @@ -67,7 +68,7 @@ func (s *DecayPlacementDateMathDuration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Decay", err) } f := Float64(value) s.Decay = &f @@ -78,17 +79,17 @@ func (s *DecayPlacementDateMathDuration) UnmarshalJSON(data []byte) error { case "offset": if err := dec.Decode(&s.Offset); err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "scale": if err := dec.Decode(&s.Scale); err != nil { - return err + return fmt.Errorf("%s | %w", "Scale", err) } } diff --git a/typedapi/types/decayplacementdoubledouble.go b/typedapi/types/decayplacementdoubledouble.go index 409bf4319f..733c811148 100644 --- a/typedapi/types/decayplacementdoubledouble.go +++ b/typedapi/types/decayplacementdoubledouble.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DecayPlacementdoubledouble type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L153-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L153-L172 type DecayPlacementdoubledouble struct { // Decay Defines how documents are scored at the distance given at scale. Decay *Float64 `json:"decay,omitempty"` @@ -67,7 +68,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Decay", err) } f := Float64(value) s.Decay = &f @@ -83,7 +84,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } f := Float64(value) s.Offset = &f @@ -99,7 +100,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } f := Float64(value) s.Origin = &f @@ -115,7 +116,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Scale", err) } f := Float64(value) s.Scale = &f diff --git a/typedapi/types/decayplacementgeolocationdistance.go b/typedapi/types/decayplacementgeolocationdistance.go index 06de982052..cad1cd2731 100644 --- a/typedapi/types/decayplacementgeolocationdistance.go +++ b/typedapi/types/decayplacementgeolocationdistance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DecayPlacementGeoLocationDistance type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L153-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L153-L172 type DecayPlacementGeoLocationDistance struct { // Decay Defines how documents are scored at the distance given at scale. Decay *Float64 `json:"decay,omitempty"` @@ -67,7 +68,7 @@ func (s *DecayPlacementGeoLocationDistance) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Decay", err) } f := Float64(value) s.Decay = &f @@ -78,17 +79,17 @@ func (s *DecayPlacementGeoLocationDistance) UnmarshalJSON(data []byte) error { case "offset": if err := dec.Decode(&s.Offset); err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "scale": if err := dec.Decode(&s.Scale); err != nil { - return err + return fmt.Errorf("%s | %w", "Scale", err) } } diff --git a/typedapi/types/defaults.go b/typedapi/types/defaults.go index b52a7e25fc..65059249cc 100644 --- a/typedapi/types/defaults.go +++ b/typedapi/types/defaults.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Defaults type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/info/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/info/types.ts#L24-L27 type Defaults struct { AnomalyDetectors AnomalyDetectors `json:"anomaly_detectors"` Datafeeds Datafeeds `json:"datafeeds"` diff --git a/typedapi/types/definition.go b/typedapi/types/definition.go index 0df0f92faf..c4a9fbdc5c 100644 --- a/typedapi/types/definition.go +++ b/typedapi/types/definition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Definition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L24-L29 type Definition struct { // Preprocessors Collection of preprocessors Preprocessors []Preprocessor `json:"preprocessors,omitempty"` diff --git a/typedapi/types/delayeddatacheckconfig.go b/typedapi/types/delayeddatacheckconfig.go index 1053e35190..6c1e372018 100644 --- a/typedapi/types/delayeddatacheckconfig.go +++ b/typedapi/types/delayeddatacheckconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DelayedDataCheckConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L119-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L119-L130 type DelayedDataCheckConfig struct { // CheckWindow The window of time that is searched for late data. This window of time ends // with the latest finalized bucket. @@ -60,7 +61,7 @@ func (s *DelayedDataCheckConfig) UnmarshalJSON(data []byte) error { case "check_window": if err := dec.Decode(&s.CheckWindow); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckWindow", err) } case "enabled": @@ -70,7 +71,7 @@ func (s *DelayedDataCheckConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/deleteoperation.go b/typedapi/types/deleteoperation.go index c6e2df8e17..88216ff22b 100644 --- a/typedapi/types/deleteoperation.go +++ b/typedapi/types/deleteoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DeleteOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L134-L134 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L134-L134 type DeleteOperation struct { // Id_ The document ID. 
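An aside on the type-switch decoding visible in DelayedDataCheckConfig above: "enabled" is accepted either as a JSON boolean or as a quoted string, and the new "Enabled | ..." wrapping only fires when the quoted form fails to parse. A short sketch under the same module-path assumption, with invented inputs:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both spellings of "enabled" decode to the same Go value.
	for _, raw := range []string{
		`{"enabled": true, "check_window": "2h"}`,
		`{"enabled": "true", "check_window": "2h"}`,
	} {
		var cfg types.DelayedDataCheckConfig
		if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
			fmt.Println("decode failed:", err) // e.g. Enabled | strconv.ParseBool: ...
			continue
		}
		fmt.Println("enabled:", cfg.Enabled)
	}
}
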
Id_ *string `json:"_id,omitempty"` @@ -63,7 +64,7 @@ func (s *DeleteOperation) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "if_primary_term": @@ -73,7 +74,7 @@ func (s *DeleteOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IfPrimaryTerm", err) } s.IfPrimaryTerm = &value case float64: @@ -83,27 +84,27 @@ func (s *DeleteOperation) UnmarshalJSON(data []byte) error { case "if_seq_no": if err := dec.Decode(&s.IfSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "IfSeqNo", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/delimitedpayloadtokenfilter.go b/typedapi/types/delimitedpayloadtokenfilter.go index 319e9a1790..ad15458c6f 100644 --- a/typedapi/types/delimitedpayloadtokenfilter.go +++ b/typedapi/types/delimitedpayloadtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DelimitedPayloadTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L68-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L68-L72 type DelimitedPayloadTokenFilter struct { Delimiter *string `json:"delimiter,omitempty"` Encoding *delimitedpayloadencoding.DelimitedPayloadEncoding `json:"encoding,omitempty"` @@ -58,7 +59,7 @@ func (s *DelimitedPayloadTokenFilter) UnmarshalJSON(data []byte) error { case "delimiter": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Delimiter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,17 +70,17 @@ func (s *DelimitedPayloadTokenFilter) UnmarshalJSON(data []byte) error { case "encoding": if err := dec.Decode(&s.Encoding); err != nil { - return err + return fmt.Errorf("%s | %w", "Encoding", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/densevectorindexoptions.go b/typedapi/types/densevectorindexoptions.go index 7fa304ac0f..d428d8537c 100644 --- a/typedapi/types/densevectorindexoptions.go +++ b/typedapi/types/densevectorindexoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DenseVectorIndexOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/DenseVectorIndexOptions.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/DenseVectorIndexOptions.ts#L22-L26 type DenseVectorIndexOptions struct { EfConstruction int `json:"ef_construction"` M int `json:"m"` @@ -60,7 +61,7 @@ func (s *DenseVectorIndexOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EfConstruction", err) } s.EfConstruction = value case float64: @@ -76,7 +77,7 @@ func (s *DenseVectorIndexOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "M", err) } s.M = value case float64: @@ -87,7 +88,7 @@ func (s *DenseVectorIndexOptions) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/densevectorproperty.go b/typedapi/types/densevectorproperty.go index 1de92558cf..dbbe4ca07b 100644 --- a/typedapi/types/densevectorproperty.go +++ b/typedapi/types/densevectorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,9 +33,9 @@ import ( // DenseVectorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/complex.ts#L51-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/complex.ts#L52-L58 type DenseVectorProperty struct { - Dims int `json:"dims"` + Dims *int `json:"dims,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` @@ -70,17 +71,17 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Dims", err) } - s.Dims = value + s.Dims = &value case float64: f := int(v) - s.Dims = f + s.Dims = &f } case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -398,7 +399,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -413,7 +414,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -422,7 +423,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case "index_options": if err := dec.Decode(&s.IndexOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexOptions", err) } case "meta": @@ -430,7 +431,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -743,7 +744,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -754,7 +755,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/deprecation.go b/typedapi/types/deprecation.go index 7910d7afbf..4db9c6e82d 100644 --- a/typedapi/types/deprecation.go +++ b/typedapi/types/deprecation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Deprecation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/deprecations/types.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/deprecations/types.ts#L29-L35 type Deprecation struct { Details string `json:"details"` // Level The level property describes the significance of the issue. 
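Note on the DenseVectorProperty hunk above: Dims moves from int to *int and becomes omitempty, so mappings built in Go code now set it through a pointer. A minimal sketch; intPtr is a local helper of ours, not part of the client:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// intPtr takes the address of an int literal for optional fields.
func intPtr(v int) *int { return &v }

func main() {
	prop := types.DenseVectorProperty{
		Dims: intPtr(384), // before this change: Dims: 384
	}

	out, err := json.Marshal(prop)
	if err != nil {
		panic(err)
	}
	// "dims":384 appears only when the pointer is set; leaving Dims nil now
	// omits the key instead of sending 0.
	fmt.Println(string(out))

	if prop.Dims != nil {
		fmt.Println("dims:", *prop.Dims)
	}
}
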
@@ -59,7 +60,7 @@ func (s *Deprecation) UnmarshalJSON(data []byte) error { case "details": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,13 +71,13 @@ func (s *Deprecation) UnmarshalJSON(data []byte) error { case "level": if err := dec.Decode(&s.Level); err != nil { - return err + return fmt.Errorf("%s | %w", "Level", err) } case "message": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *Deprecation) UnmarshalJSON(data []byte) error { case "url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Url", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/deprecationindexing.go b/typedapi/types/deprecationindexing.go index f3b5002ffe..6a966dff1d 100644 --- a/typedapi/types/deprecationindexing.go +++ b/typedapi/types/deprecationindexing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DeprecationIndexing type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L144-L146 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L144-L146 type DeprecationIndexing struct { Enabled string `json:"enabled"` } @@ -53,7 +54,7 @@ func (s *DeprecationIndexing) UnmarshalJSON(data []byte) error { case "enabled": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/derivativeaggregate.go b/typedapi/types/derivativeaggregate.go index febf24ebfc..7c10756729 100644 --- a/typedapi/types/derivativeaggregate.go +++ b/typedapi/types/derivativeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DerivativeAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L227-L231 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L227-L231 type DerivativeAggregate struct { Meta Metadata `json:"meta,omitempty"` NormalizedValue *Float64 `json:"normalized_value,omitempty"` @@ -59,7 +60,7 @@ func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "normalized_value": @@ -69,7 +70,7 @@ func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NormalizedValue", err) } f := Float64(value) s.NormalizedValue = &f @@ -81,7 +82,7 @@ func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { case "normalized_value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NormalizedValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,13 +93,13 @@ func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/derivativeaggregation.go b/typedapi/types/derivativeaggregation.go index f1e8f608b6..9b8f69254a 100644 --- a/typedapi/types/derivativeaggregation.go +++ b/typedapi/types/derivativeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DerivativeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L196-L196 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L196-L196 type DerivativeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/detectionrule.go b/typedapi/types/detectionrule.go index 8a66be2935..5ff0bc717a 100644 --- a/typedapi/types/detectionrule.go +++ b/typedapi/types/detectionrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // DetectionRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Rule.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Rule.ts#L25-L39 type DetectionRule struct { // Actions The set of actions to be triggered when the rule applies. If more than one // action is specified the effects of all actions are combined. diff --git a/typedapi/types/detector.go b/typedapi/types/detector.go index 04baafce6e..9a12cecf43 100644 --- a/typedapi/types/detector.go +++ b/typedapi/types/detector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Detector type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Detector.ts#L25-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Detector.ts#L25-L67 type Detector struct { // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. 
It is used for @@ -89,18 +90,18 @@ func (s *Detector) UnmarshalJSON(data []byte) error { case "by_field_name": if err := dec.Decode(&s.ByFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "ByFieldName", err) } case "custom_rules": if err := dec.Decode(&s.CustomRules); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomRules", err) } case "detector_description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DetectorDescription", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *Detector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DetectorIndex", err) } s.DetectorIndex = &value case float64: @@ -127,18 +128,18 @@ func (s *Detector) UnmarshalJSON(data []byte) error { case "exclude_frequent": if err := dec.Decode(&s.ExcludeFrequent); err != nil { - return err + return fmt.Errorf("%s | %w", "ExcludeFrequent", err) } case "field_name": if err := dec.Decode(&s.FieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldName", err) } case "function": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Function", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -149,12 +150,12 @@ func (s *Detector) UnmarshalJSON(data []byte) error { case "over_field_name": if err := dec.Decode(&s.OverFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "OverFieldName", err) } case "partition_field_name": if err := dec.Decode(&s.PartitionFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldName", err) } case "use_null": @@ -164,7 +165,7 @@ func (s *Detector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UseNull", err) } s.UseNull = &value case bool: diff --git a/typedapi/types/detectorread.go b/typedapi/types/detectorread.go index 4880074311..870d00783e 100644 --- a/typedapi/types/detectorread.go +++ b/typedapi/types/detectorread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DetectorRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Detector.ts#L69-L125 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Detector.ts#L69-L125 type DetectorRead struct { // ByFieldName The field used to split the data. 
// In particular, this property is used for analyzing the splits with respect to @@ -96,18 +97,18 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { case "by_field_name": if err := dec.Decode(&s.ByFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "ByFieldName", err) } case "custom_rules": if err := dec.Decode(&s.CustomRules); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomRules", err) } case "detector_description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DetectorDescription", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DetectorIndex", err) } s.DetectorIndex = &value case float64: @@ -134,18 +135,18 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { case "exclude_frequent": if err := dec.Decode(&s.ExcludeFrequent); err != nil { - return err + return fmt.Errorf("%s | %w", "ExcludeFrequent", err) } case "field_name": if err := dec.Decode(&s.FieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldName", err) } case "function": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Function", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -156,12 +157,12 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { case "over_field_name": if err := dec.Decode(&s.OverFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "OverFieldName", err) } case "partition_field_name": if err := dec.Decode(&s.PartitionFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "PartitionFieldName", err) } case "use_null": @@ -171,7 +172,7 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UseNull", err) } s.UseNull = &value case bool: diff --git a/typedapi/types/diagnosis.go b/typedapi/types/diagnosis.go index 69eaa5e6ca..9315a72b63 100644 --- a/typedapi/types/diagnosis.go +++ b/typedapi/types/diagnosis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Diagnosis type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L49-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L49-L55 type Diagnosis struct { Action string `json:"action"` AffectedResources DiagnosisAffectedResources `json:"affected_resources"` @@ -57,7 +58,7 @@ func (s *Diagnosis) UnmarshalJSON(data []byte) error { case "action": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,13 +69,13 @@ func (s *Diagnosis) UnmarshalJSON(data []byte) error { case "affected_resources": if err := dec.Decode(&s.AffectedResources); err != nil { - return err + return fmt.Errorf("%s | %w", "AffectedResources", err) } case "cause": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Cause", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *Diagnosis) UnmarshalJSON(data []byte) error { case "help_url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HelpUrl", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +99,7 @@ func (s *Diagnosis) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/diagnosisaffectedresources.go b/typedapi/types/diagnosisaffectedresources.go index 1df765a916..a45a081d2f 100644 --- a/typedapi/types/diagnosisaffectedresources.go +++ b/typedapi/types/diagnosisaffectedresources.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DiagnosisAffectedResources type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L57-L63 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L57-L63 type DiagnosisAffectedResources struct { FeatureStates []string `json:"feature_states,omitempty"` Indices []string `json:"indices,omitempty"` @@ -55,7 +56,7 @@ func (s *DiagnosisAffectedResources) UnmarshalJSON(data []byte) error { case "feature_states": if err := dec.Decode(&s.FeatureStates); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureStates", err) } case "indices": @@ -64,29 +65,29 @@ func (s *DiagnosisAffectedResources) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "nodes": if err := dec.Decode(&s.Nodes); err != nil { - return err + return fmt.Errorf("%s | %w", "Nodes", err) } case "slm_policies": if err := dec.Decode(&s.SlmPolicies); err != nil { - return err + return fmt.Errorf("%s | %w", "SlmPolicies", err) } case "snapshot_repositories": if err := dec.Decode(&s.SnapshotRepositories); err != nil { - return err + return fmt.Errorf("%s | %w", "SnapshotRepositories", err) } } diff --git a/typedapi/types/dictionarydecompoundertokenfilter.go b/typedapi/types/dictionarydecompoundertokenfilter.go index c83f66b945..651cbe4390 100644 --- a/typedapi/types/dictionarydecompoundertokenfilter.go +++ b/typedapi/types/dictionarydecompoundertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DictionaryDecompounderTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L54-L56 type DictionaryDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` @@ -61,7 +62,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "hyphenation_patterns_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HyphenationPatternsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSubwordSize", err) } s.MaxSubwordSize = &value case float64: @@ -94,7 +95,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinSubwordSize", err) } s.MinSubwordSize = &value case float64: @@ -110,7 +111,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinWordSize", err) } s.MinWordSize = &value case float64: @@ -125,7 +126,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OnlyLongestMatch", err) } s.OnlyLongestMatch = &value case bool: @@ -134,23 +135,23 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "word_list": if err := dec.Decode(&s.WordList); err != nil { - return err + return fmt.Errorf("%s | %w", "WordList", err) } case "word_list_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WordListPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/directgenerator.go b/typedapi/types/directgenerator.go index bcc223dd9c..4ab0fc6b08 100644 --- a/typedapi/types/directgenerator.go +++ b/typedapi/types/directgenerator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DirectGenerator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L265-L328 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L265-L328 type DirectGenerator struct { // Field The field to fetch the candidate suggestions from. // Needs to be set globally or per suggestion. @@ -96,7 +97,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "max_edits": @@ -107,7 +108,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxEdits", err) } s.MaxEdits = &value case float64: @@ -122,7 +123,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxInspections", err) } f := float32(value) s.MaxInspections = &f @@ -138,7 +139,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTermFreq", err) } f := float32(value) s.MaxTermFreq = &f @@ -154,7 +155,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocFreq", err) } f := float32(value) s.MinDocFreq = &f @@ -171,7 +172,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinWordLength", err) } s.MinWordLength = &value case float64: @@ -182,7 +183,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "post_filter": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PostFilter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -194,7 +195,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "pre_filter": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PreFilter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,7 +212,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -227,7 +228,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -237,7 +238,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "suggest_mode": if err := dec.Decode(&s.SuggestMode); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestMode", err) } } diff --git a/typedapi/types/discovery.go b/typedapi/types/discovery.go index 6db52552ce..8b52971521 100644 --- a/typedapi/types/discovery.go +++ b/typedapi/types/discovery.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Discovery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L201-L219 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L201-L219 type Discovery struct { ClusterApplierStats *ClusterAppliedStats `json:"cluster_applier_stats,omitempty"` // ClusterStateQueue Contains statistics for the cluster state queue of the node. diff --git a/typedapi/types/discoverynode.go b/typedapi/types/discoverynode.go index 6b19e66376..03f007e6fd 100644 --- a/typedapi/types/discoverynode.go +++ b/typedapi/types/discoverynode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DiscoveryNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DiscoveryNode.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DiscoveryNode.ts#L24-L30 type DiscoveryNode struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` @@ -58,27 +59,27 @@ func (s *DiscoveryNode) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "ephemeral_id": if err := dec.Decode(&s.EphemeralId); err != nil { - return err + return fmt.Errorf("%s | %w", "EphemeralId", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/diskindicator.go b/typedapi/types/diskindicator.go index 296e313b7d..663b8d6076 100644 --- a/typedapi/types/diskindicator.go +++ b/typedapi/types/diskindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DiskIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L121-L125 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L121-L125 type DiskIndicator struct { Details *DiskIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *DiskIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/diskindicatordetails.go b/typedapi/types/diskindicatordetails.go index fbd5f96247..db4560cb5c 100644 --- a/typedapi/types/diskindicatordetails.go +++ b/typedapi/types/diskindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DiskIndicatorDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L126-L132 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L126-L132 type DiskIndicatorDetails struct { IndicesWithReadonlyBlock int64 `json:"indices_with_readonly_block"` NodesOverFloodStageWatermark int64 `json:"nodes_over_flood_stage_watermark"` @@ -61,7 +62,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesWithReadonlyBlock", err) } s.IndicesWithReadonlyBlock = value case float64: @@ -76,7 +77,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodesOverFloodStageWatermark", err) } s.NodesOverFloodStageWatermark = value case float64: @@ -91,7 +92,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodesOverHighWatermark", err) } s.NodesOverHighWatermark = value case float64: @@ -106,7 +107,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodesWithEnoughDiskSpace", err) } s.NodesWithEnoughDiskSpace = value case float64: @@ -121,7 +122,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodesWithUnknownDiskStatus", err) } s.NodesWithUnknownDiskStatus = value case float64: diff --git a/typedapi/types/diskusage.go b/typedapi/types/diskusage.go index c64fd609ea..c16de97988 100644 --- a/typedapi/types/diskusage.go +++ b/typedapi/types/diskusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DiskUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L62-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L62-L69 type DiskUsage struct { FreeBytes int64 `json:"free_bytes"` FreeDiskPercent Float64 `json:"free_disk_percent"` @@ -62,7 +63,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeBytes", err) } s.FreeBytes = value case float64: @@ -77,7 +78,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeDiskPercent", err) } f := Float64(value) s.FreeDiskPercent = f @@ -89,7 +90,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalBytes", err) } s.TotalBytes = value case float64: @@ -120,7 +121,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedBytes", err) } s.UsedBytes = value case float64: @@ -135,7 +136,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedDiskPercent", err) } f := Float64(value) s.UsedDiskPercent = f diff --git a/typedapi/types/dismaxquery.go b/typedapi/types/dismaxquery.go index 755dae2469..7712ef3051 100644 --- a/typedapi/types/dismaxquery.go +++ b/typedapi/types/dismaxquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DisMaxQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L78-L90 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L78-L90 type DisMaxQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -71,7 +72,7 @@ func (s *DisMaxQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -82,13 +83,13 @@ func (s *DisMaxQuery) UnmarshalJSON(data []byte) error { case "queries": if err := dec.Decode(&s.Queries); err != nil { - return err + return fmt.Errorf("%s | %w", "Queries", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *DisMaxQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TieBreaker", err) } f := Float64(value) s.TieBreaker = &f diff --git a/typedapi/types/dissectprocessor.go b/typedapi/types/dissectprocessor.go index 22a274e550..dbba5daa1c 100644 --- a/typedapi/types/dissectprocessor.go +++ b/typedapi/types/dissectprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DissectProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L571-L590 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L571-L590 type DissectProcessor struct { // AppendSeparator The character(s) that separate the appended fields. 
AppendSeparator *string `json:"append_separator,omitempty"` @@ -73,7 +74,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case "append_separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AppendSeparator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,7 +86,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,13 +97,13 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -132,7 +133,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -141,13 +142,13 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -159,7 +160,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/distancefeaturequery.go b/typedapi/types/distancefeaturequery.go index 2d89633f01..d4af8afb6a 100644 --- a/typedapi/types/distancefeaturequery.go +++ b/typedapi/types/distancefeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // GeoDistanceFeatureQuery // DateDistanceFeatureQuery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L72-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L72-L76 type DistanceFeatureQuery interface{} diff --git a/typedapi/types/distancefeaturequerybasedatemathduration.go b/typedapi/types/distancefeaturequerybasedatemathduration.go index ff1f5a6ae2..c2dc9aa28c 100644 --- a/typedapi/types/distancefeaturequerybasedatemathduration.go +++ b/typedapi/types/distancefeaturequerybasedatemathduration.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DistanceFeatureQueryBaseDateMathDuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L40-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L40-L60 type DistanceFeatureQueryBaseDateMathDuration struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -83,7 +84,7 @@ func (s *DistanceFeatureQueryBaseDateMathDuration) UnmarshalJSON(data []byte) er case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,23 +95,23 @@ func (s *DistanceFeatureQueryBaseDateMathDuration) UnmarshalJSON(data []byte) er case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/distancefeaturequerybasegeolocationdistance.go b/typedapi/types/distancefeaturequerybasegeolocationdistance.go index 142c8204dc..4ce8712b1d 100644 --- a/typedapi/types/distancefeaturequerybasegeolocationdistance.go +++ b/typedapi/types/distancefeaturequerybasegeolocationdistance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DistanceFeatureQueryBaseGeoLocationDistance type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L40-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L40-L60 type DistanceFeatureQueryBaseGeoLocationDistance struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -83,7 +84,7 @@ func (s *DistanceFeatureQueryBaseGeoLocationDistance) UnmarshalJSON(data []byte) case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,23 +95,23 @@ func (s *DistanceFeatureQueryBaseGeoLocationDistance) UnmarshalJSON(data []byte) case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/diversifiedsampleraggregation.go b/typedapi/types/diversifiedsampleraggregation.go index 804a1ee9b6..390eb15933 100644 --- a/typedapi/types/diversifiedsampleraggregation.go +++ b/typedapi/types/diversifiedsampleraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DiversifiedSamplerAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L320-L341 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L320-L341 type DiversifiedSamplerAggregation struct { // ExecutionHint The type of value used for de-duplication. 
ExecutionHint *sampleraggregationexecutionhint.SamplerAggregationExecutionHint `json:"execution_hint,omitempty"` @@ -65,12 +66,12 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case "execution_hint": if err := dec.Decode(&s.ExecutionHint); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionHint", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "max_docs_per_value": @@ -81,7 +82,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocsPerValue", err) } s.MaxDocsPerValue = &value case float64: @@ -91,13 +92,13 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -118,7 +119,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -127,7 +128,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -135,7 +136,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -150,7 +151,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: diff --git a/typedapi/types/docstats.go b/typedapi/types/docstats.go index 5c991b9531..bc481072e8 100644 --- a/typedapi/types/docstats.go +++ b/typedapi/types/docstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DocStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L97-L109 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L97-L109 type DocStats struct { // Count Total number of non-deleted documents across all primary shards assigned to // selected nodes. 
@@ -67,7 +68,7 @@ func (s *DocStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -82,7 +83,7 @@ func (s *DocStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Deleted", err) } s.Deleted = &value case float64: diff --git a/typedapi/types/document.go b/typedapi/types/document.go index 8d68fc3dfe..a79dfc9c7f 100644 --- a/typedapi/types/document.go +++ b/typedapi/types/document.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Document type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/simulate/types.ts#L41-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/simulate/types.ts#L41-L55 type Document struct { // Id_ Unique identifier for the document. // This ID must be unique within the `_index`. @@ -57,17 +58,17 @@ func (s *Document) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } } diff --git a/typedapi/types/documentrating.go b/typedapi/types/documentrating.go index 8facb7d527..e5a03b65d9 100644 --- a/typedapi/types/documentrating.go +++ b/typedapi/types/documentrating.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DocumentRating type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L116-L123 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L116-L123 type DocumentRating struct { // Id_ The document ID. 
Id_ string `json:"_id"` @@ -58,12 +59,12 @@ func (s *DocumentRating) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "rating": @@ -74,7 +75,7 @@ func (s *DocumentRating) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Rating", err) } s.Rating = value case float64: diff --git a/typedapi/types/documentsimulation.go b/typedapi/types/documentsimulation.go index af56c13895..a2416e1598 100644 --- a/typedapi/types/documentsimulation.go +++ b/typedapi/types/documentsimulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -33,7 +33,7 @@ import ( // DocumentSimulation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/simulate/types.ts#L57-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/simulate/types.ts#L57-L85 type DocumentSimulation struct { DocumentSimulation map[string]string `json:"-"` // Id_ Unique identifier for the document. This ID must be unique within the @@ -67,23 +67,23 @@ func (s *DocumentSimulation) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "_ingest": if err := dec.Decode(&s.Ingest_); err != nil { - return err + return fmt.Errorf("%s | %w", "Ingest_", err) } case "_routing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,17 +97,17 @@ func (s *DocumentSimulation) UnmarshalJSON(data []byte) error { s.Source_ = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "_version_type": if err := dec.Decode(&s.VersionType_); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType_", err) } case "_version": if err := dec.Decode(&s.Version_); err != nil { - return err + return fmt.Errorf("%s | %w", "Version_", err) } default: @@ -118,7 +118,7 @@ func (s *DocumentSimulation) UnmarshalJSON(data []byte) error { } raw := new(string) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentSimulation", err) } s.DocumentSimulation[key] = *raw } diff --git a/typedapi/types/dotexpanderprocessor.go b/typedapi/types/dotexpanderprocessor.go index 7f41437f06..350ae4a5d1 100644 --- a/typedapi/types/dotexpanderprocessor.go +++ b/typedapi/types/dotexpanderprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
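The numeric cases above (DiskUsage.FreeBytes, DocStats.Count, DocumentRating.Rating, and so on) all share the same tolerant decoding: the value may arrive either as a JSON number or as a quoted string. A hedged sketch of that shape, using an illustrative rating type rather than a generated one:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// rating is an illustrative type; the generated decoders apply the same
// switch to their own struct fields.
type rating struct {
	Value int
}

func (r *rating) UnmarshalJSON(data []byte) error {
	var tmp interface{}
	if err := json.Unmarshal(data, &tmp); err != nil {
		return fmt.Errorf("%s | %w", "Value", err)
	}
	switch v := tmp.(type) {
	case string: // numerics are sometimes serialized as strings
		n, err := strconv.Atoi(v)
		if err != nil {
			return fmt.Errorf("%s | %w", "Value", err)
		}
		r.Value = n
	case float64: // plain JSON numbers decode to float64
		r.Value = int(v)
	}
	return nil
}

func main() {
	var a, b rating
	_ = json.Unmarshal([]byte(`"3"`), &a)
	_ = json.Unmarshal([]byte(`3`), &b)
	fmt.Println(a.Value, b.Value) // 3 3
}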
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DotExpanderProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L592-L603 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L592-L603 type DotExpanderProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -71,7 +72,7 @@ func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,13 +83,13 @@ func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -113,13 +114,13 @@ func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/doublenumberproperty.go b/typedapi/types/doublenumberproperty.go index 394f324de4..9af60e756c 100644 --- a/typedapi/types/doublenumberproperty.go +++ b/typedapi/types/doublenumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // DoubleNumberProperty type. 
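Plain string fields such as Description, If, Path and Tag in the processors above are decoded through a raw message plus strconv.Unquote, so a properly quoted JSON string is unquoted and anything else is kept verbatim. A small sketch under that assumption; decodeLooseString is an illustrative helper, not part of the generated code:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)

// decodeLooseString reads the next value as a raw token, unquotes it when
// it is a quoted JSON string, and otherwise keeps the token as-is.
func decodeLooseString(dec *json.Decoder) (string, error) {
	var tmp json.RawMessage
	if err := dec.Decode(&tmp); err != nil {
		return "", fmt.Errorf("%s | %w", "Description", err)
	}
	o := string(tmp)
	if unquoted, err := strconv.Unquote(o); err == nil {
		o = unquoted // usual case: a quoted JSON string
	}
	return o, nil // fall back to the raw token (e.g. a bare number)
}

func main() {
	dec := json.NewDecoder(strings.NewReader(`"drop the document"`))
	v, _ := decodeLooseString(dec)
	fmt.Println(v) // drop the document
}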
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L144-L147 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L144-L147 type DoubleNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,7 +499,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -508,7 +509,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } f := Float64(value) s.NullValue = &f @@ -519,7 +520,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -832,7 +833,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -841,7 +842,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -850,7 +851,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -858,7 +859,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -868,7 +869,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -884,7 +885,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -898,7 +899,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -907,12 +908,12 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/doublerangeproperty.go b/typedapi/types/doublerangeproperty.go index 4c414ef66a..2c9cbf0e7d 100644 --- a/typedapi/types/doublerangeproperty.go +++ b/typedapi/types/doublerangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DoubleRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/range.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/range.ts#L34-L36 type DoubleRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -72,7 +73,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -101,13 +102,13 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -118,7 +119,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -127,7 +128,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -445,7 +446,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -460,7 +461,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -472,7 +473,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -785,7 +786,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -801,7 +802,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -810,7 +811,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/doubletermsaggregate.go b/typedapi/types/doubletermsaggregate.go index c69be97b7f..2b28e22127 100644 --- a/typedapi/types/doubletermsaggregate.go +++ b/typedapi/types/doubletermsaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DoubleTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L411-L416 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L411-L416 type DoubleTermsAggregate struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]DoubleTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []DoubleTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/doubletermsbucket.go b/typedapi/types/doubletermsbucket.go index c90171e74b..97c51cdd51 100644 --- a/typedapi/types/doubletermsbucket.go +++ b/typedapi/types/doubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // DoubleTermsBucket type. 
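The DoubleTermsAggregate hunk above also shows how the generated code resolves the buckets union: it inspects the first byte of the raw JSON and decodes either a map keyed by bucket key or a plain slice. A simplified, self-contained sketch of that branch, with a placeholder bucket type instead of DoubleTermsBucket:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
)

// bucket stands in for DoubleTermsBucket; only the shape matters here.
type bucket struct {
	DocCount int64 `json:"doc_count"`
}

// decodeBuckets mirrors the union handling above: an object becomes a map
// keyed by bucket key, an array becomes a slice.
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	trimmed := bytes.TrimSpace(raw)
	if len(trimmed) == 0 {
		return nil, errors.New("Buckets | empty value")
	}
	dec := json.NewDecoder(bytes.NewReader(trimmed))
	switch trimmed[0] {
	case '{': // keyed buckets: {"a": {...}}
		out := make(map[string]bucket)
		if err := dec.Decode(&out); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return out, nil
	case '[': // anonymous buckets: [{...}]
		var out []bucket
		if err := dec.Decode(&out); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return out, nil
	}
	return nil, errors.New("Buckets | unexpected JSON shape")
}

func main() {
	m, _ := decodeBuckets(json.RawMessage(`{"a":{"doc_count":2}}`))
	l, _ := decodeBuckets(json.RawMessage(`[{"doc_count":2}]`))
	fmt.Println(m, l) // map[a:{2}] [{2}]
}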
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L418-L421 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L418-L421 type DoubleTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -63,7 +63,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -78,7 +78,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountError", err) } s.DocCountError = &value case float64: @@ -93,7 +93,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } f := Float64(value) s.Key = f @@ -105,7 +105,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,490 +128,490 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err 
+ return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", 
err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -621,7 +621,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/downsampleconfig.go b/typedapi/types/downsampleconfig.go index 3f3de11144..41f36c803f 100644 --- a/typedapi/types/downsampleconfig.go +++ b/typedapi/types/downsampleconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DownsampleConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/Downsample.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/Downsample.ts#L22-L27 type DownsampleConfig struct { // FixedInterval The interval at which to aggregate the original time series index. FixedInterval string `json:"fixed_interval"` @@ -52,7 +53,7 @@ func (s *DownsampleConfig) UnmarshalJSON(data []byte) error { case "fixed_interval": if err := dec.Decode(&s.FixedInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "FixedInterval", err) } } diff --git a/typedapi/types/downsamplinground.go b/typedapi/types/downsamplinground.go index acdd8ba1ed..f497b28c84 100644 --- a/typedapi/types/downsamplinground.go +++ b/typedapi/types/downsamplinground.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DownsamplingRound type. 
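The long DoubleTermsBucket hunk above dispatches sub-aggregations on Elasticsearch's typed keys: response keys look like "dterms#by_price", and the prefix before '#' selects the concrete aggregate type to decode into. A reduced sketch of that dispatch with two placeholder aggregate types; the real switch in the generated file covers every aggregate kind listed above:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Two placeholder aggregate types; the generated switch handles dozens.
type maxAggregate struct {
	Value float64 `json:"value"`
}

type termsAggregate struct {
	Buckets []json.RawMessage `json:"buckets"`
}

// decodeAggs splits each "type#name" key and decodes the value into the
// Go type selected by the prefix, falling back to a generic map.
func decodeAggs(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{}, len(raw))
	for key, msg := range raw {
		elems := strings.SplitN(key, "#", 2)
		if len(elems) != 2 { // no typed key requested: keep the raw value
			out[key] = msg
			continue
		}
		var dst interface{}
		switch elems[0] {
		case "max":
			dst = &maxAggregate{}
		case "dterms":
			dst = &termsAggregate{}
		default:
			dst = &map[string]interface{}{}
		}
		if err := json.Unmarshal(msg, dst); err != nil {
			return nil, fmt.Errorf("%s | %w", "Aggregations", err)
		}
		out[elems[1]] = dst
	}
	return out, nil
}

func main() {
	raw := map[string]json.RawMessage{
		"max#top_price": json.RawMessage(`{"value": 19.99}`),
	}
	aggs, _ := decodeAggs(raw)
	fmt.Printf("%+v\n", aggs["top_price"]) // &{Value:19.99}
}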
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DownsamplingRound.ts#L23-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DownsamplingRound.ts#L23-L32 type DownsamplingRound struct { // After The duration since rollover when this downsampling round should execute After Duration `json:"after"` @@ -54,12 +55,12 @@ func (s *DownsamplingRound) UnmarshalJSON(data []byte) error { case "after": if err := dec.Decode(&s.After); err != nil { - return err + return fmt.Errorf("%s | %w", "After", err) } case "config": if err := dec.Decode(&s.Config); err != nil { - return err + return fmt.Errorf("%s | %w", "Config", err) } } diff --git a/typedapi/types/dropprocessor.go b/typedapi/types/dropprocessor.go index 759ce55e08..5f1655baa8 100644 --- a/typedapi/types/dropprocessor.go +++ b/typedapi/types/dropprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // DropProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L605-L605 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L605-L605 type DropProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -64,7 +65,7 @@ func (s *DropProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *DropProcessor) UnmarshalJSON(data []byte) error { case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *DropProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -101,13 +102,13 @@ func (s *DropProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/duration.go b/typedapi/types/duration.go index 0c50bdc8c1..a70c16370b 100644 --- a/typedapi/types/duration.go +++ b/typedapi/types/duration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,5 +24,5 @@ package types // // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L52-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L52-L58 type Duration interface{} diff --git a/typedapi/types/durationvalueunitfloatmillis.go b/typedapi/types/durationvalueunitfloatmillis.go index d76a4e7623..77e984a126 100644 --- a/typedapi/types/durationvalueunitfloatmillis.go +++ b/typedapi/types/durationvalueunitfloatmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DurationValueUnitFloatMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L67-L67 type DurationValueUnitFloatMillis Float64 diff --git a/typedapi/types/durationvalueunitmillis.go b/typedapi/types/durationvalueunitmillis.go index 09abb38a93..59325e55bd 100644 --- a/typedapi/types/durationvalueunitmillis.go +++ b/typedapi/types/durationvalueunitmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DurationValueUnitMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L67-L67 type DurationValueUnitMillis int64 diff --git a/typedapi/types/durationvalueunitnanos.go b/typedapi/types/durationvalueunitnanos.go index 589f99be0e..d47fb345b8 100644 --- a/typedapi/types/durationvalueunitnanos.go +++ b/typedapi/types/durationvalueunitnanos.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DurationValueUnitNanos type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L67-L67 type DurationValueUnitNanos int64 diff --git a/typedapi/types/durationvalueunitseconds.go b/typedapi/types/durationvalueunitseconds.go index b2b2d69b0a..c5c0e322a0 100644 --- a/typedapi/types/durationvalueunitseconds.go +++ b/typedapi/types/durationvalueunitseconds.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // DurationValueUnitSeconds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L67-L67 type DurationValueUnitSeconds int64 diff --git a/typedapi/types/dutchanalyzer.go b/typedapi/types/dutchanalyzer.go index e995aeb0d3..73099dd6eb 100644 --- a/typedapi/types/dutchanalyzer.go +++ b/typedapi/types/dutchanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // DutchAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L61-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L61-L64 type DutchAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` @@ -56,19 +57,19 @@ func (s *DutchAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/dynamicproperty.go b/typedapi/types/dynamicproperty.go index 8ff4916fdc..711229f886 100644 --- a/typedapi/types/dynamicproperty.go +++ b/typedapi/types/dynamicproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -36,7 +37,7 @@ import ( // DynamicProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L285-L316 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L286-L317 type DynamicProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -91,7 +92,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -123,7 +124,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -136,13 +137,13 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -153,7 +154,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -162,7 +163,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "eager_global_ordinals": @@ -172,7 +173,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EagerGlobalOrdinals", err) } s.EagerGlobalOrdinals = &value case bool: @@ -186,7 +187,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -503,7 +504,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -520,7 +521,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -535,7 +536,7 @@ func (s *DynamicProperty) UnmarshalJSON(data 
[]byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -549,7 +550,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -558,7 +559,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "index_options": if err := dec.Decode(&s.IndexOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexOptions", err) } case "index_phrases": @@ -568,7 +569,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPhrases", err) } s.IndexPhrases = &value case bool: @@ -577,13 +578,13 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "index_prefixes": if err := dec.Decode(&s.IndexPrefixes); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPrefixes", err) } case "locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Locale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -597,7 +598,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "norms": @@ -607,7 +608,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Norms", err) } s.Norms = &value case bool: @@ -616,12 +617,12 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "position_increment_gap": @@ -632,7 +633,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositionIncrementGap", err) } s.PositionIncrementGap = &value case float64: @@ -648,7 +649,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrecisionStep", err) } s.PrecisionStep = &value case float64: @@ -966,7 +967,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -975,7 +976,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -984,7 +985,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -992,7 +993,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { o := 
NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -1002,7 +1003,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "search_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1014,7 +1015,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "search_quote_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQuoteAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1026,7 +1027,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1042,7 +1043,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -1051,17 +1052,17 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "term_vector": if err := dec.Decode(&s.TermVector); err != nil { - return err + return fmt.Errorf("%s | %w", "TermVector", err) } case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/dynamictemplate.go b/typedapi/types/dynamictemplate.go index 9bf011ae20..ef6034065e 100644 --- a/typedapi/types/dynamictemplate.go +++ b/typedapi/types/dynamictemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // DynamicTemplate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/dynamic-template.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/dynamic-template.ts#L22-L30 type DynamicTemplate struct { Mapping Property `json:"mapping,omitempty"` Match *string `json:"match,omitempty"` @@ -363,7 +364,7 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { case "match": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -375,7 +376,7 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { case "match_mapping_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchMappingType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -386,13 +387,13 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { case "match_pattern": if err := dec.Decode(&s.MatchPattern); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchPattern", err) } case "path_match": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PathMatch", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -404,7 +405,7 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { case "path_unmatch": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PathUnmatch", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -416,7 +417,7 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { case "unmatch": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Unmatch", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/edgengramtokenfilter.go b/typedapi/types/edgengramtokenfilter.go index 76aeedba77..820cab9174 100644 --- a/typedapi/types/edgengramtokenfilter.go +++ b/typedapi/types/edgengramtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // EdgeNGramTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L79-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L79-L85 type EdgeNGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` @@ -65,7 +66,7 @@ func (s *EdgeNGramTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxGram", err) } s.MaxGram = &value case float64: @@ -81,7 +82,7 @@ func (s *EdgeNGramTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinGram", err) } s.MinGram = &value case float64: @@ -91,22 +92,22 @@ func (s *EdgeNGramTokenFilter) UnmarshalJSON(data []byte) error { case "preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "side": if err := dec.Decode(&s.Side); err != nil { - return err + return fmt.Errorf("%s | %w", "Side", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/edgengramtokenizer.go b/typedapi/types/edgengramtokenizer.go index 90a79326cd..b077837bc4 100644 --- a/typedapi/types/edgengramtokenizer.go +++ b/typedapi/types/edgengramtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // EdgeNGramTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L31-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L31-L37 type EdgeNGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` @@ -60,7 +61,7 @@ func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { case "custom_token_chars": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomTokenChars", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxGram", err) } s.MaxGram = value case float64: @@ -93,7 +94,7 @@ func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinGram", err) } s.MinGram = value case float64: @@ -103,17 +104,17 @@ func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { case "token_chars": if err := dec.Decode(&s.TokenChars); err != nil { - return err + return fmt.Errorf("%s | %w", "TokenChars", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/elasticsearchversioninfo.go b/typedapi/types/elasticsearchversioninfo.go index 86646f87ce..f1ff064698 100644 --- a/typedapi/types/elasticsearchversioninfo.go +++ b/typedapi/types/elasticsearchversioninfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ElasticsearchVersionInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Base.ts#L54-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Base.ts#L54-L64 type ElasticsearchVersionInfo struct { BuildDate DateTime `json:"build_date"` BuildFlavor string `json:"build_flavor"` @@ -60,13 +61,13 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { case "build_date": if err := dec.Decode(&s.BuildDate); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildDate", err) } case "build_flavor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildFlavor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { case "build_hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildHash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,7 +95,7 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildSnapshot", err) } s.BuildSnapshot = value case bool: @@ -104,7 +105,7 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { case "build_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,7 +117,7 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { case "number": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Int", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,17 +128,17 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { case "lucene_version": if err := dec.Decode(&s.LuceneVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "LuceneVersion", err) } case "minimum_index_compatibility_version": if err := dec.Decode(&s.MinimumIndexCompatibilityVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumIndexCompatibilityVersion", err) } case "minimum_wire_compatibility_version": if err := dec.Decode(&s.MinimumWireCompatibilityVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumWireCompatibilityVersion", err) } } diff --git a/typedapi/types/elasticsearchversionmininfo.go b/typedapi/types/elasticsearchversionmininfo.go new file mode 100644 index 0000000000..4f0a8f696d --- /dev/null +++ b/typedapi/types/elasticsearchversionmininfo.go @@ -0,0 +1,101 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// ElasticsearchVersionMinInfo type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Base.ts#L66-L74 +type ElasticsearchVersionMinInfo struct { + BuildFlavor string `json:"build_flavor"` + Int string `json:"number"` + MinimumIndexCompatibilityVersion string `json:"minimum_index_compatibility_version"` + MinimumWireCompatibilityVersion string `json:"minimum_wire_compatibility_version"` +} + +func (s *ElasticsearchVersionMinInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "build_flavor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "BuildFlavor", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.BuildFlavor = o + + case "number": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Int", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Int = o + + case "minimum_index_compatibility_version": + if err := dec.Decode(&s.MinimumIndexCompatibilityVersion); err != nil { + return fmt.Errorf("%s | %w", "MinimumIndexCompatibilityVersion", err) + } + + case "minimum_wire_compatibility_version": + if err := dec.Decode(&s.MinimumWireCompatibilityVersion); err != nil { + return fmt.Errorf("%s | %w", "MinimumWireCompatibilityVersion", err) + } + + } + } + return nil +} + +// NewElasticsearchVersionMinInfo returns a ElasticsearchVersionMinInfo. +func NewElasticsearchVersionMinInfo() *ElasticsearchVersionMinInfo { + r := &ElasticsearchVersionMinInfo{} + + return r +} diff --git a/typedapi/types/elisiontokenfilter.go b/typedapi/types/elisiontokenfilter.go index 3a3afcb8e2..c77aecca1b 100644 --- a/typedapi/types/elisiontokenfilter.go +++ b/typedapi/types/elisiontokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ElisionTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L187-L192 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L187-L192 type ElisionTokenFilter struct { Articles []string `json:"articles,omitempty"` ArticlesCase Stringifiedboolean `json:"articles_case,omitempty"` @@ -56,18 +57,18 @@ func (s *ElisionTokenFilter) UnmarshalJSON(data []byte) error { case "articles": if err := dec.Decode(&s.Articles); err != nil { - return err + return fmt.Errorf("%s | %w", "Articles", err) } case "articles_case": if err := dec.Decode(&s.ArticlesCase); err != nil { - return err + return fmt.Errorf("%s | %w", "ArticlesCase", err) } case "articles_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ArticlesPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,12 +79,12 @@ func (s *ElisionTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/email.go b/typedapi/types/email.go index 7a1aa072e2..dd350ef9d6 100644 --- a/typedapi/types/email.go +++ b/typedapi/types/email.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Email type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L238-L250 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L238-L250 type Email struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` @@ -67,28 +68,28 @@ func (s *Email) UnmarshalJSON(data []byte) error { s.Attachments = make(map[string]EmailAttachmentContainer, 0) } if err := dec.Decode(&s.Attachments); err != nil { - return err + return fmt.Errorf("%s | %w", "Attachments", err) } case "bcc": if err := dec.Decode(&s.Bcc); err != nil { - return err + return fmt.Errorf("%s | %w", "Bcc", err) } case "body": if err := dec.Decode(&s.Body); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } case "cc": if err := dec.Decode(&s.Cc); err != nil { - return err + return fmt.Errorf("%s | %w", "Cc", err) } case "from": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,28 +100,28 @@ func (s *Email) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "priority": if err := dec.Decode(&s.Priority); err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } case "reply_to": if err := dec.Decode(&s.ReplyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "ReplyTo", err) } case "sent_date": if err := dec.Decode(&s.SentDate); err != nil { - return err + return fmt.Errorf("%s | %w", "SentDate", err) } case "subject": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Subject", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *Email) UnmarshalJSON(data []byte) error { case "to": if err := dec.Decode(&s.To); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } } diff --git a/typedapi/types/emailaction.go b/typedapi/types/emailaction.go index 5384827b52..9e9710d47e 100644 --- a/typedapi/types/emailaction.go +++ b/typedapi/types/emailaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // EmailAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L252-L252 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L252-L252 type EmailAction struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` @@ -67,28 +68,28 @@ func (s *EmailAction) UnmarshalJSON(data []byte) error { s.Attachments = make(map[string]EmailAttachmentContainer, 0) } if err := dec.Decode(&s.Attachments); err != nil { - return err + return fmt.Errorf("%s | %w", "Attachments", err) } case "bcc": if err := dec.Decode(&s.Bcc); err != nil { - return err + return fmt.Errorf("%s | %w", "Bcc", err) } case "body": if err := dec.Decode(&s.Body); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } case "cc": if err := dec.Decode(&s.Cc); err != nil { - return err + return fmt.Errorf("%s | %w", "Cc", err) } case "from": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,28 +100,28 @@ func (s *EmailAction) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "priority": if err := dec.Decode(&s.Priority); err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } case "reply_to": if err := dec.Decode(&s.ReplyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "ReplyTo", err) } case "sent_date": if err := dec.Decode(&s.SentDate); err != nil { - return err + return fmt.Errorf("%s | %w", "SentDate", err) } case "subject": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Subject", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *EmailAction) UnmarshalJSON(data []byte) error { case "to": if err := dec.Decode(&s.To); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } } diff --git a/typedapi/types/emailattachmentcontainer.go b/typedapi/types/emailattachmentcontainer.go index 7e112bf361..7ec0d5b9a9 100644 --- a/typedapi/types/emailattachmentcontainer.go +++ b/typedapi/types/emailattachmentcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EmailAttachmentContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L211-L216 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L211-L216 type EmailAttachmentContainer struct { Data *DataEmailAttachment `json:"data,omitempty"` Http *HttpEmailAttachment `json:"http,omitempty"` diff --git a/typedapi/types/emailbody.go b/typedapi/types/emailbody.go index 67bd140c3c..0736b7c8dc 100644 --- a/typedapi/types/emailbody.go +++ b/typedapi/types/emailbody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // EmailBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L192-L195 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L192-L195 type EmailBody struct { Html *string `json:"html,omitempty"` Text *string `json:"text,omitempty"` @@ -54,7 +55,7 @@ func (s *EmailBody) UnmarshalJSON(data []byte) error { case "html": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Html", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *EmailBody) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/emailresult.go b/typedapi/types/emailresult.go index 5e37e718ee..dd04e3d07a 100644 --- a/typedapi/types/emailresult.go +++ b/typedapi/types/emailresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // EmailResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L205-L209 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L205-L209 type EmailResult struct { Account *string `json:"account,omitempty"` Message Email `json:"message"` @@ -55,7 +56,7 @@ func (s *EmailResult) UnmarshalJSON(data []byte) error { case "account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Account", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,13 +67,13 @@ func (s *EmailResult) UnmarshalJSON(data []byte) error { case "message": if err := dec.Decode(&s.Message); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/emptyobject.go b/typedapi/types/emptyobject.go index 72f0c9058d..f0ce18de2c 100644 --- a/typedapi/types/emptyobject.go +++ b/typedapi/types/emptyobject.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EmptyObject type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L160-L161 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L160-L161 type EmptyObject struct { } diff --git a/typedapi/types/enrichpolicy.go b/typedapi/types/enrichpolicy.go index 84470c2083..df3c3428bb 100644 --- a/typedapi/types/enrichpolicy.go +++ b/typedapi/types/enrichpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // EnrichPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/_types/Policy.ts#L34-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/_types/Policy.ts#L34-L41 type EnrichPolicy struct { ElasticsearchVersion *string `json:"elasticsearch_version,omitempty"` EnrichFields []string `json:"enrich_fields"` @@ -58,7 +59,7 @@ func (s *EnrichPolicy) UnmarshalJSON(data []byte) error { case "elasticsearch_version": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ElasticsearchVersion", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,13 +74,13 @@ func (s *EnrichPolicy) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "EnrichFields", err) } s.EnrichFields = append(s.EnrichFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.EnrichFields); err != nil { - return err + return fmt.Errorf("%s | %w", "EnrichFields", err) } } @@ -89,29 +90,29 @@ func (s *EnrichPolicy) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "match_field": if err := dec.Decode(&s.MatchField); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchField", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } } diff --git a/typedapi/types/enrichprocessor.go b/typedapi/types/enrichprocessor.go index c7f744e0bb..60aee83f78 100644 --- a/typedapi/types/enrichprocessor.go +++ b/typedapi/types/enrichprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // EnrichProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L607-L646 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L607-L646 type EnrichProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -93,7 +94,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,13 +105,13 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -140,7 +141,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -155,7 +156,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxMatches", err) } s.MaxMatches = &value case float64: @@ -165,7 +166,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "override": @@ -175,7 +176,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Override", err) } s.Override = &value case bool: @@ -185,7 +186,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "policy_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PolicyName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -196,13 +197,13 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "shape_relation": if err := dec.Decode(&s.ShapeRelation); err != nil { - return err + return fmt.Errorf("%s | %w", "ShapeRelation", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -213,7 +214,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + 
return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/ensemble.go b/typedapi/types/ensemble.go index abe376fbfe..233514558f 100644 --- a/typedapi/types/ensemble.go +++ b/typedapi/types/ensemble.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Ensemble type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L93-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L93-L99 type Ensemble struct { AggregateOutput *AggregateOutput `json:"aggregate_output,omitempty"` ClassificationLabels []string `json:"classification_labels,omitempty"` @@ -56,23 +57,23 @@ func (s *Ensemble) UnmarshalJSON(data []byte) error { case "aggregate_output": if err := dec.Decode(&s.AggregateOutput); err != nil { - return err + return fmt.Errorf("%s | %w", "AggregateOutput", err) } case "classification_labels": if err := dec.Decode(&s.ClassificationLabels); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassificationLabels", err) } case "feature_names": if err := dec.Decode(&s.FeatureNames); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureNames", err) } case "target_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *Ensemble) UnmarshalJSON(data []byte) error { case "trained_models": if err := dec.Decode(&s.TrainedModels); err != nil { - return err + return fmt.Errorf("%s | %w", "TrainedModels", err) } } diff --git a/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go b/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go index cf5e2727da..0743251ff2 100644 --- a/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go +++ b/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package accesstokengranttype package accesstokengranttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_token/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_token/types.ts#L23-L28 type AccessTokenGrantType struct { Name string } diff --git a/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go b/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go index ca1d553c65..2e4c9eae14 100644 --- a/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go +++ b/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package acknowledgementoptions package acknowledgementoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L109-L113 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L109-L113 type AcknowledgementOptions struct { Name string } diff --git a/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go b/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go index 9d49cab64c..790d491c9b 100644 --- a/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go +++ b/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package actionexecutionmode package actionexecutionmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L73-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L73-L94 type ActionExecutionMode struct { Name string } diff --git a/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go b/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go index 0b32e3c986..3f3546d1e8 100644 --- a/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go +++ b/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package actionstatusoptions package actionstatusoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L102-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L102-L107 type ActionStatusOptions struct { Name string } diff --git a/typedapi/types/enums/actiontype/actiontype.go b/typedapi/types/enums/actiontype/actiontype.go index 586cc76d4d..dab5bc617b 100644 --- a/typedapi/types/enums/actiontype/actiontype.go +++ b/typedapi/types/enums/actiontype/actiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package actiontype package actiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L64-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L64-L71 type ActionType struct { Name string } diff --git a/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go b/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go index 63b423b28e..a67a6aa7af 100644 --- a/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go +++ b/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package allocationexplaindecision package allocationexplaindecision import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L32-L37 type AllocationExplainDecision struct { Name string } diff --git a/typedapi/types/enums/apikeygranttype/apikeygranttype.go b/typedapi/types/enums/apikeygranttype/apikeygranttype.go index db130ce68e..992bb3ac56 100644 --- a/typedapi/types/enums/apikeygranttype/apikeygranttype.go +++ b/typedapi/types/enums/apikeygranttype/apikeygranttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package apikeygranttype package apikeygranttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/grant_api_key/types.ts#L48-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/grant_api_key/types.ts#L48-L51 type ApiKeyGrantType struct { Name string } diff --git a/typedapi/types/enums/appliesto/appliesto.go b/typedapi/types/enums/appliesto/appliesto.go index b482ac7b3d..254e6a37a7 100644 --- a/typedapi/types/enums/appliesto/appliesto.go +++ b/typedapi/types/enums/appliesto/appliesto.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package appliesto package appliesto import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Rule.ts#L67-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Rule.ts#L67-L72 type AppliesTo struct { Name string } diff --git a/typedapi/types/enums/boundaryscanner/boundaryscanner.go b/typedapi/types/enums/boundaryscanner/boundaryscanner.go index b48d18d4cb..da1511761f 100644 --- a/typedapi/types/enums/boundaryscanner/boundaryscanner.go +++ b/typedapi/types/enums/boundaryscanner/boundaryscanner.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package boundaryscanner package boundaryscanner import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L27-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L27-L46 type BoundaryScanner struct { Name string } diff --git a/typedapi/types/enums/bytes/bytes.go b/typedapi/types/enums/bytes/bytes.go index 596eb39ffa..34d518af7c 100644 --- a/typedapi/types/enums/bytes/bytes.go +++ b/typedapi/types/enums/bytes/bytes.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package bytes package bytes import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L169-L181 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L169-L181 type Bytes struct { Name string } diff --git a/typedapi/types/enums/calendarinterval/calendarinterval.go b/typedapi/types/enums/calendarinterval/calendarinterval.go index c18809deb5..e7ed78c33e 100644 --- a/typedapi/types/enums/calendarinterval/calendarinterval.go +++ b/typedapi/types/enums/calendarinterval/calendarinterval.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package calendarinterval package calendarinterval import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L249-L266 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L249-L266 type CalendarInterval struct { Name string } diff --git a/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go b/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go index dd4955b9e4..5403fc1a5b 100644 --- a/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go +++ b/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package cardinalityexecutionmode package cardinalityexecutionmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L64-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L64-L85 type CardinalityExecutionMode struct { Name string } diff --git a/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go b/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go index 280bce4604..ac62752660 100644 --- a/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go +++ b/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package catanomalydetectorcolumn package catanomalydetectorcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L32-L401 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L32-L401 type CatAnomalyDetectorColumn struct { Name string } diff --git a/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go b/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go index 0117d04202..d8d375c9ac 100644 --- a/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go +++ b/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package catdatafeedcolumn package catdatafeedcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L405-L471 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L405-L471 type CatDatafeedColumn struct { Name string } diff --git a/typedapi/types/enums/catdfacolumn/catdfacolumn.go b/typedapi/types/enums/catdfacolumn/catdfacolumn.go index dc722ccecd..fde475d4ac 100644 --- a/typedapi/types/enums/catdfacolumn/catdfacolumn.go +++ b/typedapi/types/enums/catdfacolumn/catdfacolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package catdfacolumn package catdfacolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L472-L557 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L472-L557 type CatDfaColumn struct { Name string } diff --git a/typedapi/types/enums/categorizationstatus/categorizationstatus.go b/typedapi/types/enums/categorizationstatus/categorizationstatus.go index a979807cd4..9152bb0eb8 100644 --- a/typedapi/types/enums/categorizationstatus/categorizationstatus.go +++ b/typedapi/types/enums/categorizationstatus/categorizationstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package categorizationstatus package categorizationstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Model.ts#L83-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Model.ts#L83-L86 type CategorizationStatus struct { Name string } diff --git a/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go b/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go index 77c3f0b91c..638c82b5e3 100644 --- a/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go +++ b/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package cattrainedmodelscolumn package cattrainedmodelscolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L561-L635 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L561-L635 type CatTrainedModelsColumn struct { Name string } diff --git a/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go b/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go index c30f22c30d..ea7b2b0ab2 100644 --- a/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go +++ b/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package cattransformcolumn package cattransformcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/_types/CatBase.ts#L640-L844 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/_types/CatBase.ts#L640-L844 type CatTransformColumn struct { Name string } diff --git a/typedapi/types/enums/childscoremode/childscoremode.go b/typedapi/types/enums/childscoremode/childscoremode.go index 44ca512314..1f9319da3d 100644 --- a/typedapi/types/enums/childscoremode/childscoremode.go +++ b/typedapi/types/enums/childscoremode/childscoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package childscoremode package childscoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/joining.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/joining.ts#L25-L39 type ChildScoreMode struct { Name string } diff --git a/typedapi/types/enums/chunkingmode/chunkingmode.go b/typedapi/types/enums/chunkingmode/chunkingmode.go index ca1ffc8a72..edda6db783 100644 --- a/typedapi/types/enums/chunkingmode/chunkingmode.go +++ b/typedapi/types/enums/chunkingmode/chunkingmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package chunkingmode package chunkingmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L233-L237 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L233-L237 type ChunkingMode struct { Name string } diff --git a/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go b/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go index 4e477a72a2..b4a9d4f63b 100644 --- a/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go +++ b/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package clusterinfotarget package clusterinfotarget import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L378-L384 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L378-L384 type ClusterInfoTarget struct { Name string } diff --git a/typedapi/types/enums/clusterprivilege/clusterprivilege.go b/typedapi/types/enums/clusterprivilege/clusterprivilege.go index 0e242b2381..b688fbb850 100644 --- a/typedapi/types/enums/clusterprivilege/clusterprivilege.go +++ b/typedapi/types/enums/clusterprivilege/clusterprivilege.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package clusterprivilege package clusterprivilege import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L41-L80 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L41-L80 type ClusterPrivilege struct { Name string } diff --git a/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go b/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go index 1a2189f917..d0a0cf131e 100644 --- a/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go +++ b/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package clustersearchstatus package clustersearchstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L37-L43 type ClusterSearchStatus struct { Name string } diff --git a/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go b/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go index 14dc41245e..7ecd44391d 100644 --- a/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go +++ b/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package combinedfieldsoperator package combinedfieldsoperator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L489-L492 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L489-L492 type CombinedFieldsOperator struct { Name string } diff --git a/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go b/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go index 8db44e6911..67eb27f4ae 100644 --- a/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go +++ b/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package combinedfieldszeroterms package combinedfieldszeroterms import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L494-L503 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L494-L503 type CombinedFieldsZeroTerms struct { Name string } diff --git a/typedapi/types/enums/conditionop/conditionop.go b/typedapi/types/enums/conditionop/conditionop.go index 84c9e48927..a6777f89d4 100644 --- a/typedapi/types/enums/conditionop/conditionop.go +++ b/typedapi/types/enums/conditionop/conditionop.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package conditionop package conditionop import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L38-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L38-L45 type ConditionOp struct { Name string } diff --git a/typedapi/types/enums/conditionoperator/conditionoperator.go b/typedapi/types/enums/conditionoperator/conditionoperator.go index 402e731c3f..e2c0ebdaa9 100644 --- a/typedapi/types/enums/conditionoperator/conditionoperator.go +++ b/typedapi/types/enums/conditionoperator/conditionoperator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package conditionoperator package conditionoperator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Rule.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Rule.ts#L74-L79 type ConditionOperator struct { Name string } diff --git a/typedapi/types/enums/conditiontype/conditiontype.go b/typedapi/types/enums/conditiontype/conditiontype.go index 09c47712a2..2a1622e642 100644 --- a/typedapi/types/enums/conditiontype/conditiontype.go +++ b/typedapi/types/enums/conditiontype/conditiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package conditiontype package conditiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L61-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L61-L67 type ConditionType struct { Name string } diff --git a/typedapi/types/enums/conflicts/conflicts.go b/typedapi/types/enums/conflicts/conflicts.go index f7f6ff1efc..07bde64ddb 100644 --- a/typedapi/types/enums/conflicts/conflicts.go +++ b/typedapi/types/enums/conflicts/conflicts.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package conflicts package conflicts import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L183-L192 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L183-L192 type Conflicts struct { Name string } diff --git a/typedapi/types/enums/connectionscheme/connectionscheme.go b/typedapi/types/enums/connectionscheme/connectionscheme.go index 8e6ae69710..ad71fe8492 100644 --- a/typedapi/types/enums/connectionscheme/connectionscheme.go +++ b/typedapi/types/enums/connectionscheme/connectionscheme.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package connectionscheme package connectionscheme import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L39-L42 type ConnectionScheme struct { Name string } diff --git a/typedapi/types/enums/converttype/converttype.go b/typedapi/types/enums/converttype/converttype.go index abfecab313..1adad8dfae 100644 --- a/typedapi/types/enums/converttype/converttype.go +++ b/typedapi/types/enums/converttype/converttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package converttype package converttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L435-L443 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L435-L443 type ConvertType struct { Name string } diff --git a/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go b/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go index e24575850a..14fd058621 100644 --- a/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go +++ b/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package dataattachmentformat package dataattachmentformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L187-L190 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L187-L190 type DataAttachmentFormat struct { Name string } diff --git a/typedapi/types/enums/datafeedstate/datafeedstate.go b/typedapi/types/enums/datafeedstate/datafeedstate.go index ab107329ab..5cc8ca8210 100644 --- a/typedapi/types/enums/datafeedstate/datafeedstate.go +++ b/typedapi/types/enums/datafeedstate/datafeedstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package datafeedstate package datafeedstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L133-L138 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L133-L138 type DatafeedState struct { Name string } diff --git a/typedapi/types/enums/dataframestate/dataframestate.go b/typedapi/types/enums/dataframestate/dataframestate.go index 16525cc207..02790631e3 100644 --- a/typedapi/types/enums/dataframestate/dataframestate.go +++ b/typedapi/types/enums/dataframestate/dataframestate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package dataframestate package dataframestate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Dataframe.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Dataframe.ts#L20-L26 type DataframeState struct { Name string } diff --git a/typedapi/types/enums/day/day.go b/typedapi/types/enums/day/day.go index 7741522d95..72d4810574 100644 --- a/typedapi/types/enums/day/day.go +++ b/typedapi/types/enums/day/day.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package day package day import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L37-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L37-L45 type Day struct { Name string } diff --git a/typedapi/types/enums/decision/decision.go b/typedapi/types/enums/decision/decision.go index c01e2acedf..485741fad4 100644 --- a/typedapi/types/enums/decision/decision.go +++ b/typedapi/types/enums/decision/decision.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package decision package decision import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L86-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L86-L95 type Decision struct { Name string } diff --git a/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go b/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go index 1346727951..2213b6f8b8 100644 --- a/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go +++ b/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package delimitedpayloadencoding package delimitedpayloadencoding import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L62-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L62-L66 type DelimitedPayloadEncoding struct { Name string } diff --git a/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go b/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go index 00630deee8..0e4ccb5275 100644 --- a/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go +++ b/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package deploymentallocationstate package deploymentallocationstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L289-L302 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L289-L302 type DeploymentAllocationState struct { Name string } diff --git a/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go b/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go index 6bda3a9a6a..acaa0abc0e 100644 --- a/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go +++ b/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package deploymentassignmentstate package deploymentassignmentstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L304-L309 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L304-L309 type DeploymentAssignmentState struct { Name string } diff --git a/typedapi/types/enums/deploymentstate/deploymentstate.go b/typedapi/types/enums/deploymentstate/deploymentstate.go index 80dc7be921..e054e372b0 100644 --- a/typedapi/types/enums/deploymentstate/deploymentstate.go +++ b/typedapi/types/enums/deploymentstate/deploymentstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package deploymentstate package deploymentstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L274-L287 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L274-L287 type DeploymentState struct { Name string } diff --git a/typedapi/types/enums/deprecationlevel/deprecationlevel.go b/typedapi/types/enums/deprecationlevel/deprecationlevel.go index 01f6e91639..ae6af01205 100644 --- a/typedapi/types/enums/deprecationlevel/deprecationlevel.go +++ b/typedapi/types/enums/deprecationlevel/deprecationlevel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package deprecationlevel package deprecationlevel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/deprecations/types.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/deprecations/types.ts#L20-L27 type DeprecationLevel struct { Name string } diff --git a/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go b/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go index 7378a2056e..f1991c08c0 100644 --- a/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go +++ b/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package dfiindependencemeasure package dfiindependencemeasure import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Similarity.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Similarity.ts#L20-L24 type DFIIndependenceMeasure struct { Name string } diff --git a/typedapi/types/enums/dfraftereffect/dfraftereffect.go b/typedapi/types/enums/dfraftereffect/dfraftereffect.go index 8add918145..d4ad605f90 100644 --- a/typedapi/types/enums/dfraftereffect/dfraftereffect.go +++ b/typedapi/types/enums/dfraftereffect/dfraftereffect.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package dfraftereffect package dfraftereffect import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Similarity.ts#L26-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Similarity.ts#L26-L30 type DFRAfterEffect struct { Name string } diff --git a/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go b/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go index 67c602a743..3bc84b7c44 100644 --- a/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go +++ b/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package dfrbasicmodel package dfrbasicmodel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Similarity.ts#L32-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Similarity.ts#L32-L40 type DFRBasicModel struct { Name string } diff --git a/typedapi/types/enums/distanceunit/distanceunit.go b/typedapi/types/enums/distanceunit/distanceunit.go index 2694e7a4f5..a7d6f3223a 100644 --- a/typedapi/types/enums/distanceunit/distanceunit.go +++ b/typedapi/types/enums/distanceunit/distanceunit.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package distanceunit package distanceunit import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L30-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L30-L40 type DistanceUnit struct { Name string } diff --git a/typedapi/types/enums/dynamicmapping/dynamicmapping.go b/typedapi/types/enums/dynamicmapping/dynamicmapping.go index 035d459d0a..0fc83ea600 100644 --- a/typedapi/types/enums/dynamicmapping/dynamicmapping.go +++ b/typedapi/types/enums/dynamicmapping/dynamicmapping.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package dynamicmapping package dynamicmapping import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/dynamic-template.ts#L37-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/dynamic-template.ts#L37-L46 type DynamicMapping struct { Name string } diff --git a/typedapi/types/enums/edgengramside/edgengramside.go b/typedapi/types/enums/edgengramside/edgengramside.go index b4b50bf148..1e217a3308 100644 --- a/typedapi/types/enums/edgengramside/edgengramside.go +++ b/typedapi/types/enums/edgengramside/edgengramside.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package edgengramside package edgengramside import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L74-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L74-L77 type EdgeNGramSide struct { Name string } diff --git a/typedapi/types/enums/emailpriority/emailpriority.go b/typedapi/types/enums/emailpriority/emailpriority.go index 2d900245fe..37350009a9 100644 --- a/typedapi/types/enums/emailpriority/emailpriority.go +++ b/typedapi/types/enums/emailpriority/emailpriority.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package emailpriority package emailpriority import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L197-L203 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L197-L203 type EmailPriority struct { Name string } diff --git a/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go b/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go index 3901fcf56b..e58f9a0bdf 100644 --- a/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go +++ b/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package enrichpolicyphase package enrichpolicyphase import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/execute_policy/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/execute_policy/types.ts#L24-L29 type EnrichPolicyPhase struct { Name string } diff --git a/typedapi/types/enums/excludefrequent/excludefrequent.go b/typedapi/types/enums/excludefrequent/excludefrequent.go index 4b0dc7ba4f..6605b8bc4d 100644 --- a/typedapi/types/enums/excludefrequent/excludefrequent.go +++ b/typedapi/types/enums/excludefrequent/excludefrequent.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package excludefrequent package excludefrequent import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Detector.ts#L127-L132 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Detector.ts#L127-L132 type ExcludeFrequent struct { Name string } diff --git a/typedapi/types/enums/executionphase/executionphase.go b/typedapi/types/enums/executionphase/executionphase.go index e2f83ad490..c1d4f27b69 100644 --- a/typedapi/types/enums/executionphase/executionphase.go +++ b/typedapi/types/enums/executionphase/executionphase.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package executionphase package executionphase import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L49-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L49-L58 type ExecutionPhase struct { Name string } diff --git a/typedapi/types/enums/executionstatus/executionstatus.go b/typedapi/types/enums/executionstatus/executionstatus.go index 74e7462d1b..3b8d9b3528 100644 --- a/typedapi/types/enums/executionstatus/executionstatus.go +++ b/typedapi/types/enums/executionstatus/executionstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package executionstatus package executionstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L38-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L38-L47 type ExecutionStatus struct { Name string } diff --git a/typedapi/types/enums/expandwildcard/expandwildcard.go b/typedapi/types/enums/expandwildcard/expandwildcard.go index 33d98ab81b..59aadd160e 100644 --- a/typedapi/types/enums/expandwildcard/expandwildcard.go +++ b/typedapi/types/enums/expandwildcard/expandwildcard.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package expandwildcard package expandwildcard import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L201-L215 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L201-L215 type ExpandWildcard struct { Name string } diff --git a/typedapi/types/enums/feature/feature.go b/typedapi/types/enums/feature/feature.go index 2a9a15e0ee..8aa8a55c10 100644 --- a/typedapi/types/enums/feature/feature.go +++ b/typedapi/types/enums/feature/feature.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package feature package feature import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get/IndicesGetRequest.ts#L90-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get/IndicesGetRequest.ts#L90-L94 type Feature struct { Name string } diff --git a/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go b/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go index e3bfbc0a50..1a1fb917cf 100644 --- a/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go +++ b/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package fieldsortnumerictype package fieldsortnumerictype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L37-L42 type FieldSortNumericType struct { Name string } diff --git a/typedapi/types/enums/fieldtype/fieldtype.go b/typedapi/types/enums/fieldtype/fieldtype.go index 3fe2319a2f..fe47b05545 100644 --- a/typedapi/types/enums/fieldtype/fieldtype.go +++ b/typedapi/types/enums/fieldtype/fieldtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package fieldtype package fieldtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/Property.ts#L160-L204 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/Property.ts#L160-L204 type FieldType struct { Name string } diff --git a/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go b/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go index c7dd6739d7..15049192e2 100644 --- a/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go +++ b/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package fieldvaluefactormodifier package fieldvaluefactormodifier import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L298-L341 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L298-L341 type FieldValueFactorModifier struct { Name string } diff --git a/typedapi/types/enums/filtertype/filtertype.go b/typedapi/types/enums/filtertype/filtertype.go index 88493a2b51..40d48d1db3 100644 --- a/typedapi/types/enums/filtertype/filtertype.go +++ b/typedapi/types/enums/filtertype/filtertype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package filtertype package filtertype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Filter.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Filter.ts#L43-L46 type FilterType struct { Name string } diff --git a/typedapi/types/enums/followerindexstatus/followerindexstatus.go b/typedapi/types/enums/followerindexstatus/followerindexstatus.go index 70e37a70f9..e6d5ce9dcd 100644 --- a/typedapi/types/enums/followerindexstatus/followerindexstatus.go +++ b/typedapi/types/enums/followerindexstatus/followerindexstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package followerindexstatus package followerindexstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow_info/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow_info/types.ts#L30-L33 type FollowerIndexStatus struct { Name string } diff --git a/typedapi/types/enums/functionboostmode/functionboostmode.go b/typedapi/types/enums/functionboostmode/functionboostmode.go index 7c47836aa9..9fa44ef1d3 100644 --- a/typedapi/types/enums/functionboostmode/functionboostmode.go +++ b/typedapi/types/enums/functionboostmode/functionboostmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package functionboostmode package functionboostmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L270-L296 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L270-L296 type FunctionBoostMode struct { Name string } diff --git a/typedapi/types/enums/functionscoremode/functionscoremode.go b/typedapi/types/enums/functionscoremode/functionscoremode.go index f0f9e1f3cb..94bcc9fa58 100644 --- a/typedapi/types/enums/functionscoremode/functionscoremode.go +++ b/typedapi/types/enums/functionscoremode/functionscoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package functionscoremode package functionscoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L243-L268 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L243-L268 type FunctionScoreMode struct { Name string } diff --git a/typedapi/types/enums/gappolicy/gappolicy.go b/typedapi/types/enums/gappolicy/gappolicy.go index 96c8d16ba7..cdadee3bd1 100644 --- a/typedapi/types/enums/gappolicy/gappolicy.go +++ b/typedapi/types/enums/gappolicy/gappolicy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package gappolicy package gappolicy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L61-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L61-L76 type GapPolicy struct { Name string } diff --git a/typedapi/types/enums/geodistancetype/geodistancetype.go b/typedapi/types/enums/geodistancetype/geodistancetype.go index 28f11365cb..f966084ec5 100644 --- a/typedapi/types/enums/geodistancetype/geodistancetype.go +++ b/typedapi/types/enums/geodistancetype/geodistancetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package geodistancetype package geodistancetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L42-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L42-L51 type GeoDistanceType struct { Name string } diff --git a/typedapi/types/enums/geoexecution/geoexecution.go b/typedapi/types/enums/geoexecution/geoexecution.go index 05da9ad88f..5865fd493c 100644 --- a/typedapi/types/enums/geoexecution/geoexecution.go +++ b/typedapi/types/enums/geoexecution/geoexecution.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package geoexecution package geoexecution import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L52-L55 type GeoExecution struct { Name string } diff --git a/typedapi/types/enums/geoorientation/geoorientation.go b/typedapi/types/enums/geoorientation/geoorientation.go index ad80d9145b..a8ad35d7b1 100644 --- a/typedapi/types/enums/geoorientation/geoorientation.go +++ b/typedapi/types/enums/geoorientation/geoorientation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package geoorientation package geoorientation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/geo.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/geo.ts#L34-L39 type GeoOrientation struct { Name string } diff --git a/typedapi/types/enums/geoshaperelation/geoshaperelation.go b/typedapi/types/enums/geoshaperelation/geoshaperelation.go index 315185a322..d4de0c186e 100644 --- a/typedapi/types/enums/geoshaperelation/geoshaperelation.go +++ b/typedapi/types/enums/geoshaperelation/geoshaperelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package geoshaperelation package geoshaperelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L64-L82 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L64-L82 type GeoShapeRelation struct { Name string } diff --git a/typedapi/types/enums/geostrategy/geostrategy.go b/typedapi/types/enums/geostrategy/geostrategy.go index 44e2520db4..0f328f78af 100644 --- a/typedapi/types/enums/geostrategy/geostrategy.go +++ b/typedapi/types/enums/geostrategy/geostrategy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package geostrategy package geostrategy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/geo.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/geo.ts#L56-L59 type GeoStrategy struct { Name string } diff --git a/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go b/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go index d9afd05c06..53f785c745 100644 --- a/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go +++ b/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package geovalidationmethod package geovalidationmethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L147-L157 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L147-L157 type GeoValidationMethod struct { Name string } diff --git a/typedapi/types/enums/granttype/granttype.go b/typedapi/types/enums/granttype/granttype.go index 73bdf9995f..3efd9b395f 100644 --- a/typedapi/types/enums/granttype/granttype.go +++ b/typedapi/types/enums/granttype/granttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package granttype package granttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/GrantType.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/GrantType.ts#L20-L29 type GrantType struct { Name string } diff --git a/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go b/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go index f98fc68874..335b1315c5 100644 --- a/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go +++ b/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package gridaggregationtype package gridaggregationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_mvt/_types/GridType.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_mvt/_types/GridType.ts#L30-L33 type GridAggregationType struct { Name string } diff --git a/typedapi/types/enums/gridtype/gridtype.go b/typedapi/types/enums/gridtype/gridtype.go index 4c2ba82dcd..9339ffe5f9 100644 --- a/typedapi/types/enums/gridtype/gridtype.go +++ b/typedapi/types/enums/gridtype/gridtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package gridtype package gridtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_mvt/_types/GridType.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_mvt/_types/GridType.ts#L20-L28 type GridType struct { Name string } diff --git a/typedapi/types/enums/groupby/groupby.go b/typedapi/types/enums/groupby/groupby.go index 5afe705ff8..17c7e24ad4 100644 --- a/typedapi/types/enums/groupby/groupby.go +++ b/typedapi/types/enums/groupby/groupby.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package groupby package groupby import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/_types/GroupBy.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/_types/GroupBy.ts#L20-L27 type GroupBy struct { Name string } diff --git a/typedapi/types/enums/healthstatus/healthstatus.go b/typedapi/types/enums/healthstatus/healthstatus.go index 5d6246aeb8..e393b52273 100644 --- a/typedapi/types/enums/healthstatus/healthstatus.go +++ b/typedapi/types/enums/healthstatus/healthstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package healthstatus package healthstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L219-L239 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L219-L239 type HealthStatus struct { Name string } diff --git a/typedapi/types/enums/highlighterencoder/highlighterencoder.go b/typedapi/types/enums/highlighterencoder/highlighterencoder.go index b48ffad41e..6a3f90c2a9 100644 --- a/typedapi/types/enums/highlighterencoder/highlighterencoder.go +++ b/typedapi/types/enums/highlighterencoder/highlighterencoder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package highlighterencoder package highlighterencoder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L158-L161 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L158-L161 type HighlighterEncoder struct { Name string } diff --git a/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go b/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go index 5a767c9eb3..75b1d5c759 100644 --- a/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go +++ b/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package highlighterfragmenter package highlighterfragmenter import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L163-L166 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L163-L166 type HighlighterFragmenter struct { Name string } diff --git a/typedapi/types/enums/highlighterorder/highlighterorder.go b/typedapi/types/enums/highlighterorder/highlighterorder.go index 41def59b0d..1145732684 100644 --- a/typedapi/types/enums/highlighterorder/highlighterorder.go +++ b/typedapi/types/enums/highlighterorder/highlighterorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package highlighterorder package highlighterorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L168-L170 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L168-L170 type HighlighterOrder struct { Name string } diff --git a/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go b/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go index 3c08a4ea87..6ab879eed0 100644 --- a/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go +++ b/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package highlightertagsschema package highlightertagsschema import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L172-L174 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L172-L174 type HighlighterTagsSchema struct { Name string } diff --git a/typedapi/types/enums/highlightertype/highlightertype.go b/typedapi/types/enums/highlightertype/highlightertype.go index c37094723d..a071d3fc4a 100644 --- a/typedapi/types/enums/highlightertype/highlightertype.go +++ b/typedapi/types/enums/highlightertype/highlightertype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package highlightertype package highlightertype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L176-L191 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L176-L191 type HighlighterType struct { Name string } diff --git a/typedapi/types/enums/holtwinterstype/holtwinterstype.go b/typedapi/types/enums/holtwinterstype/holtwinterstype.go index 8b2bdb46b7..d31ce78bf8 100644 --- a/typedapi/types/enums/holtwinterstype/holtwinterstype.go +++ b/typedapi/types/enums/holtwinterstype/holtwinterstype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package holtwinterstype package holtwinterstype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L283-L286 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L283-L286 type HoltWintersType struct { Name string } diff --git a/typedapi/types/enums/httpinputmethod/httpinputmethod.go b/typedapi/types/enums/httpinputmethod/httpinputmethod.go index b9b542a8c3..c7c85f301e 100644 --- a/typedapi/types/enums/httpinputmethod/httpinputmethod.go +++ b/typedapi/types/enums/httpinputmethod/httpinputmethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package httpinputmethod package httpinputmethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L59-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L59-L65 type HttpInputMethod struct { Name string } diff --git a/typedapi/types/enums/ibdistribution/ibdistribution.go b/typedapi/types/enums/ibdistribution/ibdistribution.go index fe8f744ca0..9770f319b3 100644 --- a/typedapi/types/enums/ibdistribution/ibdistribution.go +++ b/typedapi/types/enums/ibdistribution/ibdistribution.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package ibdistribution package ibdistribution import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Similarity.ts#L42-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Similarity.ts#L42-L45 type IBDistribution struct { Name string } diff --git a/typedapi/types/enums/iblambda/iblambda.go b/typedapi/types/enums/iblambda/iblambda.go index 9e76a9384f..22872e3c44 100644 --- a/typedapi/types/enums/iblambda/iblambda.go +++ b/typedapi/types/enums/iblambda/iblambda.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package iblambda package iblambda import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Similarity.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Similarity.ts#L47-L50 type IBLambda struct { Name string } diff --git a/typedapi/types/enums/icucollationalternate/icucollationalternate.go b/typedapi/types/enums/icucollationalternate/icucollationalternate.go index eed0460ebb..f6fbd70196 100644 --- a/typedapi/types/enums/icucollationalternate/icucollationalternate.go +++ b/typedapi/types/enums/icucollationalternate/icucollationalternate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icucollationalternate package icucollationalternate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L89-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L89-L92 type IcuCollationAlternate struct { Name string } diff --git a/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go b/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go index 42aaf5f64f..f1a772a457 100644 --- a/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go +++ b/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icucollationcasefirst package icucollationcasefirst import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L94-L97 type IcuCollationCaseFirst struct { Name string } diff --git a/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go b/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go index 2f2592cc5f..d771000ebb 100644 --- a/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go +++ b/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icucollationdecomposition package icucollationdecomposition import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L99-L102 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L99-L102 type IcuCollationDecomposition struct { Name string } diff --git a/typedapi/types/enums/icucollationstrength/icucollationstrength.go b/typedapi/types/enums/icucollationstrength/icucollationstrength.go index b7f476ff24..e67d702450 100644 --- a/typedapi/types/enums/icucollationstrength/icucollationstrength.go +++ b/typedapi/types/enums/icucollationstrength/icucollationstrength.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icucollationstrength package icucollationstrength import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L104-L110 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L104-L110 type IcuCollationStrength struct { Name string } diff --git a/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go b/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go index 871bccf555..50bd3a95b0 100644 --- a/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go +++ b/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icunormalizationmode package icunormalizationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L78-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L78-L81 type IcuNormalizationMode struct { Name string } diff --git a/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go b/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go index 6e90d88596..503620a292 100644 --- a/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go +++ b/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icunormalizationtype package icunormalizationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L83-L87 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L83-L87 type IcuNormalizationType struct { Name string } diff --git a/typedapi/types/enums/icutransformdirection/icutransformdirection.go b/typedapi/types/enums/icutransformdirection/icutransformdirection.go index 2c0eba94d0..9e9e9605d3 100644 --- a/typedapi/types/enums/icutransformdirection/icutransformdirection.go +++ b/typedapi/types/enums/icutransformdirection/icutransformdirection.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package icutransformdirection package icutransformdirection import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L73-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L73-L76 type IcuTransformDirection struct { Name string } diff --git a/typedapi/types/enums/impactarea/impactarea.go b/typedapi/types/enums/impactarea/impactarea.go index 371caea8ca..7d635232f8 100644 --- a/typedapi/types/enums/impactarea/impactarea.go +++ b/typedapi/types/enums/impactarea/impactarea.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package impactarea package impactarea import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L72-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L72-L77 type ImpactArea struct { Name string } diff --git a/typedapi/types/enums/include/include.go b/typedapi/types/enums/include/include.go index 59e6d65ee5..0f0d5f11dc 100644 --- a/typedapi/types/enums/include/include.go +++ b/typedapi/types/enums/include/include.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package include package include import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Include.ts#L20-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Include.ts#L20-L47 type Include struct { Name string } diff --git a/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go b/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go index 777c2a267a..8b2aa38e56 100644 --- a/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go +++ b/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexcheckonstartup package indexcheckonstartup import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L256-L263 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L260-L267 type IndexCheckOnStartup struct { Name string } diff --git a/typedapi/types/enums/indexingjobstate/indexingjobstate.go b/typedapi/types/enums/indexingjobstate/indexingjobstate.go index d91dc3d8e1..51a07f93b8 100644 --- a/typedapi/types/enums/indexingjobstate/indexingjobstate.go +++ b/typedapi/types/enums/indexingjobstate/indexingjobstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexingjobstate package indexingjobstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_jobs/types.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_jobs/types.ts#L66-L72 type IndexingJobState struct { Name string } diff --git a/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go b/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go index e2f64c8885..02394c22ba 100644 --- a/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go +++ b/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexmetadatastate package indexmetadatastate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L225-L232 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L225-L232 type IndexMetadataState struct { Name string } diff --git a/typedapi/types/enums/indexoptions/indexoptions.go b/typedapi/types/enums/indexoptions/indexoptions.go index 2a39516427..347887dde8 100644 --- a/typedapi/types/enums/indexoptions/indexoptions.go +++ b/typedapi/types/enums/indexoptions/indexoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexoptions package indexoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L243-L248 type IndexOptions struct { Name string } diff --git a/typedapi/types/enums/indexprivilege/indexprivilege.go b/typedapi/types/enums/indexprivilege/indexprivilege.go index 4a5232d0f3..2a5057d9ba 100644 --- a/typedapi/types/enums/indexprivilege/indexprivilege.go +++ b/typedapi/types/enums/indexprivilege/indexprivilege.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexprivilege package indexprivilege import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L166-L187 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L166-L187 type IndexPrivilege struct { Name string } diff --git a/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go b/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go index 07a5535f29..26d03ca812 100644 --- a/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go +++ b/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexroutingallocationoptions package indexroutingallocationoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L38-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L38-L43 type IndexRoutingAllocationOptions struct { Name string } diff --git a/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go b/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go index 0917b18f40..a05c92b96a 100644 --- a/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go +++ b/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indexroutingrebalanceoptions package indexroutingrebalanceoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L45-L50 type IndexRoutingRebalanceOptions struct { Name string } diff --git a/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go b/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go index 54583e6981..070c0addc7 100644 --- a/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go +++ b/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indicatorhealthstatus package indicatorhealthstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L25-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L25-L30 type IndicatorHealthStatus struct { Name string } diff --git a/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go b/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go index 85f4d095e7..33c6a8f8e2 100644 --- a/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go +++ b/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package indicesblockoptions package indicesblockoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/add_block/IndicesAddBlockRequest.ts#L43-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/add_block/IndicesAddBlockRequest.ts#L43-L48 type IndicesBlockOptions struct { Name string } diff --git a/typedapi/types/enums/inputtype/inputtype.go b/typedapi/types/enums/inputtype/inputtype.go index 2c5732e3be..e1dc9b6001 100644 --- a/typedapi/types/enums/inputtype/inputtype.go +++ b/typedapi/types/enums/inputtype/inputtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package inputtype package inputtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L100-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L100-L104 type InputType struct { Name string } diff --git a/typedapi/types/enums/jobblockedreason/jobblockedreason.go b/typedapi/types/enums/jobblockedreason/jobblockedreason.go index ab04ee9b7c..e782fed27d 100644 --- a/typedapi/types/enums/jobblockedreason/jobblockedreason.go +++ b/typedapi/types/enums/jobblockedreason/jobblockedreason.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package jobblockedreason package jobblockedreason import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L397-L401 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L397-L401 type JobBlockedReason struct { Name string } diff --git a/typedapi/types/enums/jobstate/jobstate.go b/typedapi/types/enums/jobstate/jobstate.go index 0a31bd5fca..2fd5df86dc 100644 --- a/typedapi/types/enums/jobstate/jobstate.go +++ b/typedapi/types/enums/jobstate/jobstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package jobstate package jobstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L36-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L36-L52 type JobState struct { Name string } diff --git a/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go b/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go index 0436e567bc..4a4b431b55 100644 --- a/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go +++ b/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package jsonprocessorconflictstrategy package jsonprocessorconflictstrategy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L849-L854 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L849-L854 type JsonProcessorConflictStrategy struct { Name string } diff --git a/typedapi/types/enums/keeptypesmode/keeptypesmode.go b/typedapi/types/enums/keeptypesmode/keeptypesmode.go index 5effb890ba..5b1f0ef9b3 100644 --- a/typedapi/types/enums/keeptypesmode/keeptypesmode.go +++ b/typedapi/types/enums/keeptypesmode/keeptypesmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package keeptypesmode package keeptypesmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L213-L216 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L213-L216 type KeepTypesMode struct { Name string } diff --git a/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go b/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go index 1650414017..5686d2817d 100644 --- a/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go +++ b/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package kuromojitokenizationmode package kuromojitokenizationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L52-L56 type KuromojiTokenizationMode struct { Name string } diff --git a/typedapi/types/enums/language/language.go b/typedapi/types/enums/language/language.go index e89a2dbdec..70e5c15384 100644 --- a/typedapi/types/enums/language/language.go +++ b/typedapi/types/enums/language/language.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package language package language import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/languages.ts#L20-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/languages.ts#L20-L55 type Language struct { Name string } diff --git a/typedapi/types/enums/level/level.go b/typedapi/types/enums/level/level.go index 339e36001f..df9aebf9c1 100644 --- a/typedapi/types/enums/level/level.go +++ b/typedapi/types/enums/level/level.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package level package level import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L249-L253 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L249-L253 type Level struct { Name string } diff --git a/typedapi/types/enums/licensestatus/licensestatus.go b/typedapi/types/enums/licensestatus/licensestatus.go index ed6c3579a8..62ae73cb4e 100644 --- a/typedapi/types/enums/licensestatus/licensestatus.go +++ b/typedapi/types/enums/licensestatus/licensestatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package licensestatus package licensestatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/_types/License.ts#L35-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/_types/License.ts#L35-L40 type LicenseStatus struct { Name string } diff --git a/typedapi/types/enums/licensetype/licensetype.go b/typedapi/types/enums/licensetype/licensetype.go index 38ab155c39..0fa8ae0ff2 100644 --- a/typedapi/types/enums/licensetype/licensetype.go +++ b/typedapi/types/enums/licensetype/licensetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package licensetype package licensetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/_types/License.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/_types/License.ts#L23-L33 type LicenseType struct { Name string } diff --git a/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go b/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go index 85c3c1a96b..a46a6b26bb 100644 --- a/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go +++ b/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package lifecycleoperationmode package lifecycleoperationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Lifecycle.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Lifecycle.ts#L20-L24 type LifecycleOperationMode struct { Name string } diff --git a/typedapi/types/enums/managedby/managedby.go b/typedapi/types/enums/managedby/managedby.go index 743a5b36cf..10e559aa2a 100644 --- a/typedapi/types/enums/managedby/managedby.go +++ b/typedapi/types/enums/managedby/managedby.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package managedby package managedby import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/DataStream.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/DataStream.ts#L32-L37 type ManagedBy struct { Name string } diff --git a/typedapi/types/enums/matchtype/matchtype.go b/typedapi/types/enums/matchtype/matchtype.go index 80eda061c8..000384314f 100644 --- a/typedapi/types/enums/matchtype/matchtype.go +++ b/typedapi/types/enums/matchtype/matchtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package matchtype package matchtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/dynamic-template.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/dynamic-template.ts#L32-L35 type MatchType struct { Name string } diff --git a/typedapi/types/enums/memorystatus/memorystatus.go b/typedapi/types/enums/memorystatus/memorystatus.go index f4f369a6ca..ef1ee51d9c 100644 --- a/typedapi/types/enums/memorystatus/memorystatus.go +++ b/typedapi/types/enums/memorystatus/memorystatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package memorystatus package memorystatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Model.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Model.ts#L88-L92 type MemoryStatus struct { Name string } diff --git a/typedapi/types/enums/metric/metric.go b/typedapi/types/enums/metric/metric.go index a001aef144..6579090bfc 100644 --- a/typedapi/types/enums/metric/metric.go +++ b/typedapi/types/enums/metric/metric.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package metric package metric import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/_types/Metric.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/_types/Metric.ts#L22-L28 type Metric struct { Name string } diff --git a/typedapi/types/enums/migrationstatus/migrationstatus.go b/typedapi/types/enums/migrationstatus/migrationstatus.go index 3678d4117c..fa67a1b195 100644 --- a/typedapi/types/enums/migrationstatus/migrationstatus.go +++ b/typedapi/types/enums/migrationstatus/migrationstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package migrationstatus package migrationstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L30-L35 type MigrationStatus struct { Name string } diff --git a/typedapi/types/enums/minimuminterval/minimuminterval.go b/typedapi/types/enums/minimuminterval/minimuminterval.go index 5f6d9189ac..4bdd396b37 100644 --- a/typedapi/types/enums/minimuminterval/minimuminterval.go +++ b/typedapi/types/enums/minimuminterval/minimuminterval.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package minimuminterval package minimuminterval import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L102-L109 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L102-L109 type MinimumInterval struct { Name string } diff --git a/typedapi/types/enums/missingorder/missingorder.go b/typedapi/types/enums/missingorder/missingorder.go index 473fb7b4c6..8b74c53604 100644 --- a/typedapi/types/enums/missingorder/missingorder.go +++ b/typedapi/types/enums/missingorder/missingorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package missingorder package missingorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/AggregationContainer.ts#L518-L522 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/AggregationContainer.ts#L518-L522 type MissingOrder struct { Name string } diff --git a/typedapi/types/enums/month/month.go b/typedapi/types/enums/month/month.go index 67c63907b1..0b87b12da9 100644 --- a/typedapi/types/enums/month/month.go +++ b/typedapi/types/enums/month/month.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package month package month import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L65-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L65-L78 type Month struct { Name string } diff --git a/typedapi/types/enums/multivaluemode/multivaluemode.go b/typedapi/types/enums/multivaluemode/multivaluemode.go index 7928a8190a..b7b695dad5 100644 --- a/typedapi/types/enums/multivaluemode/multivaluemode.go +++ b/typedapi/types/enums/multivaluemode/multivaluemode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package multivaluemode package multivaluemode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L343-L360 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L343-L360 type MultiValueMode struct { Name string } diff --git a/typedapi/types/enums/noderole/noderole.go b/typedapi/types/enums/noderole/noderole.go index a7b1397916..1100aa4de3 100644 --- a/typedapi/types/enums/noderole/noderole.go +++ b/typedapi/types/enums/noderole/noderole.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package noderole package noderole import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Node.ts#L77-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Node.ts#L77-L95 type NodeRole struct { Name string } diff --git a/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go b/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go index f03a14c33b..626378141a 100644 --- a/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go +++ b/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package noridecompoundmode package noridecompoundmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L75-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L75-L79 type NoriDecompoundMode struct { Name string } diff --git a/typedapi/types/enums/normalization/normalization.go b/typedapi/types/enums/normalization/normalization.go index fde5baafa6..0191b7cf82 100644 --- a/typedapi/types/enums/normalization/normalization.go +++ b/typedapi/types/enums/normalization/normalization.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package normalization package normalization import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Similarity.ts#L52-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Similarity.ts#L52-L58 type Normalization struct { Name string } diff --git a/typedapi/types/enums/normalizemethod/normalizemethod.go b/typedapi/types/enums/normalizemethod/normalizemethod.go index 1439130deb..2484589bef 100644 --- a/typedapi/types/enums/normalizemethod/normalizemethod.go +++ b/typedapi/types/enums/normalizemethod/normalizemethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package normalizemethod package normalizemethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L326-L352 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L326-L352 type NormalizeMethod struct { Name string } diff --git a/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go b/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go index 67cbabf647..c489157890 100644 --- a/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go +++ b/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package numericfielddataformat package numericfielddataformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/NumericFielddataFormat.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/NumericFielddataFormat.ts#L20-L23 type NumericFielddataFormat struct { Name string } diff --git a/typedapi/types/enums/onscripterror/onscripterror.go b/typedapi/types/enums/onscripterror/onscripterror.go index 52fb20498b..3344379e20 100644 --- a/typedapi/types/enums/onscripterror/onscripterror.go +++ b/typedapi/types/enums/onscripterror/onscripterror.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package onscripterror package onscripterror import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L129-L132 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L129-L132 type OnScriptError struct { Name string } diff --git a/typedapi/types/enums/operationtype/operationtype.go b/typedapi/types/enums/operationtype/operationtype.go index 4fe4c90222..5bfa6e4646 100644 --- a/typedapi/types/enums/operationtype/operationtype.go +++ b/typedapi/types/enums/operationtype/operationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package operationtype package operationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L83-L88 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L83-L88 type OperationType struct { Name string } diff --git a/typedapi/types/enums/operator/operator.go b/typedapi/types/enums/operator/operator.go index 30a061207d..5fc3e16e4d 100644 --- a/typedapi/types/enums/operator/operator.go +++ b/typedapi/types/enums/operator/operator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package operator package operator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/Operator.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/Operator.ts#L22-L27 type Operator struct { Name string } diff --git a/typedapi/types/enums/optype/optype.go b/typedapi/types/enums/optype/optype.go index 4b1d1842eb..753a3267c6 100644 --- a/typedapi/types/enums/optype/optype.go +++ b/typedapi/types/enums/optype/optype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package optype package optype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L255-L264 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L255-L264 type OpType struct { Name string } diff --git a/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go b/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go index 0b4650f730..46f1a1ac7b 100644 --- a/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go +++ b/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package pagerdutycontexttype package pagerdutycontexttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L67-L70 type PagerDutyContextType struct { Name string } diff --git a/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go b/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go index afec6d93bf..97d661ac4b 100644 --- a/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go +++ b/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package pagerdutyeventtype package pagerdutyeventtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L72-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L72-L76 type PagerDutyEventType struct { Name string } diff --git a/typedapi/types/enums/phoneticencoder/phoneticencoder.go b/typedapi/types/enums/phoneticencoder/phoneticencoder.go index 76d4d34fd7..a33fcec104 100644 --- a/typedapi/types/enums/phoneticencoder/phoneticencoder.go +++ b/typedapi/types/enums/phoneticencoder/phoneticencoder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package phoneticencoder package phoneticencoder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/phonetic-plugin.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/phonetic-plugin.ts#L23-L36 type PhoneticEncoder struct { Name string } diff --git a/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go b/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go index cf58067aee..420d9e2bbb 100644 --- a/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go +++ b/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package phoneticlanguage package phoneticlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/phonetic-plugin.ts#L38-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/phonetic-plugin.ts#L38-L51 type PhoneticLanguage struct { Name string } diff --git a/typedapi/types/enums/phoneticnametype/phoneticnametype.go b/typedapi/types/enums/phoneticnametype/phoneticnametype.go index cddbc03132..67331c828a 100644 --- a/typedapi/types/enums/phoneticnametype/phoneticnametype.go +++ b/typedapi/types/enums/phoneticnametype/phoneticnametype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package phoneticnametype package phoneticnametype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/phonetic-plugin.ts#L53-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/phonetic-plugin.ts#L53-L57 type PhoneticNameType struct { Name string } diff --git a/typedapi/types/enums/phoneticruletype/phoneticruletype.go b/typedapi/types/enums/phoneticruletype/phoneticruletype.go index b5f56a81df..acf8172dce 100644 --- a/typedapi/types/enums/phoneticruletype/phoneticruletype.go +++ b/typedapi/types/enums/phoneticruletype/phoneticruletype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package phoneticruletype package phoneticruletype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/phonetic-plugin.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/phonetic-plugin.ts#L59-L62 type PhoneticRuleType struct { Name string } diff --git a/typedapi/types/enums/policytype/policytype.go b/typedapi/types/enums/policytype/policytype.go index bdd12b0546..3ce94d81b7 100644 --- a/typedapi/types/enums/policytype/policytype.go +++ b/typedapi/types/enums/policytype/policytype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package policytype package policytype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/_types/Policy.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/_types/Policy.ts#L28-L32 type PolicyType struct { Name string } diff --git a/typedapi/types/enums/quantifier/quantifier.go b/typedapi/types/enums/quantifier/quantifier.go index 303b734f41..f29c3ffbb9 100644 --- a/typedapi/types/enums/quantifier/quantifier.go +++ b/typedapi/types/enums/quantifier/quantifier.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package quantifier package quantifier import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L71-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L71-L74 type Quantifier struct { Name string } diff --git a/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go b/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go index c12f81e92c..55c97987f3 100644 --- a/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go +++ b/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package queryrulecriteriatype package queryrulecriteriatype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/_types/QueryRuleset.ts#L54-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/_types/QueryRuleset.ts#L54-L65 type QueryRuleCriteriaType struct { Name string } diff --git a/typedapi/types/enums/queryruletype/queryruletype.go b/typedapi/types/enums/queryruletype/queryruletype.go index 7fed8739ef..c5a15491dc 100644 --- a/typedapi/types/enums/queryruletype/queryruletype.go +++ b/typedapi/types/enums/queryruletype/queryruletype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package queryruletype package queryruletype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/_types/QueryRuleset.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/_types/QueryRuleset.ts#L44-L46 type QueryRuleType struct { Name string } diff --git a/typedapi/types/enums/rangerelation/rangerelation.go b/typedapi/types/enums/rangerelation/rangerelation.go index 8dbbd9d06e..9d500a80f2 100644 --- a/typedapi/types/enums/rangerelation/rangerelation.go +++ b/typedapi/types/enums/rangerelation/rangerelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package rangerelation package rangerelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L170-L183 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L170-L183 type RangeRelation struct { Name string } diff --git a/typedapi/types/enums/ratemode/ratemode.go b/typedapi/types/enums/ratemode/ratemode.go index bb12346026..a12a20511d 100644 --- a/typedapi/types/enums/ratemode/ratemode.go +++ b/typedapi/types/enums/ratemode/ratemode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package ratemode package ratemode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L243-L252 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L243-L252 type RateMode struct { Name string } diff --git a/typedapi/types/enums/refresh/refresh.go b/typedapi/types/enums/refresh/refresh.go index 82d7703172..b37da11664 100644 --- a/typedapi/types/enums/refresh/refresh.go +++ b/typedapi/types/enums/refresh/refresh.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package refresh package refresh import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L266-L273 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L266-L273 type Refresh struct { Name string } diff --git a/typedapi/types/enums/responsecontenttype/responsecontenttype.go b/typedapi/types/enums/responsecontenttype/responsecontenttype.go index f35c9462d1..9eaf2e8d0b 100644 --- a/typedapi/types/enums/responsecontenttype/responsecontenttype.go +++ b/typedapi/types/enums/responsecontenttype/responsecontenttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package responsecontenttype package responsecontenttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L106-L110 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L106-L110 type ResponseContentType struct { Name string } diff --git a/typedapi/types/enums/result/result.go b/typedapi/types/enums/result/result.go index 5f5f0b60f2..613f4a40fd 100644 --- a/typedapi/types/enums/result/result.go +++ b/typedapi/types/enums/result/result.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package result package result import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Result.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Result.ts#L20-L26 type Result struct { Name string } diff --git a/typedapi/types/enums/resultposition/resultposition.go b/typedapi/types/enums/resultposition/resultposition.go index 0136cdfac9..745e0a0055 100644 --- a/typedapi/types/enums/resultposition/resultposition.go +++ b/typedapi/types/enums/resultposition/resultposition.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package resultposition package resultposition import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/search/types.ts#L20-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/search/types.ts#L20-L32 type ResultPosition struct { Name string } diff --git a/typedapi/types/enums/routingstate/routingstate.go b/typedapi/types/enums/routingstate/routingstate.go index e14b90f65b..09ff6cabd4 100644 --- a/typedapi/types/enums/routingstate/routingstate.go +++ b/typedapi/types/enums/routingstate/routingstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package routingstate package routingstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L351-L372 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L351-L372 type RoutingState struct { Name string } diff --git a/typedapi/types/enums/ruleaction/ruleaction.go b/typedapi/types/enums/ruleaction/ruleaction.go index 2fec130dcd..7d4eb7cd38 100644 --- a/typedapi/types/enums/ruleaction/ruleaction.go +++ b/typedapi/types/enums/ruleaction/ruleaction.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package ruleaction package ruleaction import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Rule.ts#L41-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Rule.ts#L41-L50 type RuleAction struct { Name string } diff --git a/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go b/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go index fb41546d79..468b7f6a12 100644 --- a/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go +++ b/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package runtimefieldtype package runtimefieldtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/RuntimeFields.ts#L56-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/RuntimeFields.ts#L56-L65 type RuntimeFieldType struct { Name string } diff --git a/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go b/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go index cecdc5b81f..51883d2110 100644 --- a/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go +++ b/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package sampleraggregationexecutionhint package sampleraggregationexecutionhint import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L343-L356 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L343-L356 type SamplerAggregationExecutionHint struct { Name string } diff --git a/typedapi/types/enums/scoremode/scoremode.go b/typedapi/types/enums/scoremode/scoremode.go index e8cd139d1b..9e477537ba 100644 --- a/typedapi/types/enums/scoremode/scoremode.go +++ b/typedapi/types/enums/scoremode/scoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package scoremode package scoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/rescoring.ts#L52-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/rescoring.ts#L52-L74 type ScoreMode struct { Name string } diff --git a/typedapi/types/enums/scriptlanguage/scriptlanguage.go b/typedapi/types/enums/scriptlanguage/scriptlanguage.go index 0ca37de211..7aa912561a 100644 --- a/typedapi/types/enums/scriptlanguage/scriptlanguage.go +++ b/typedapi/types/enums/scriptlanguage/scriptlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package scriptlanguage package scriptlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Scripting.ts#L24-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Scripting.ts#L24-L45 type ScriptLanguage struct { Name string } diff --git a/typedapi/types/enums/scriptsorttype/scriptsorttype.go b/typedapi/types/enums/scriptsorttype/scriptsorttype.go index d5dd55a7b9..82949e3341 100644 --- a/typedapi/types/enums/scriptsorttype/scriptsorttype.go +++ b/typedapi/types/enums/scriptsorttype/scriptsorttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package scriptsorttype package scriptsorttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L76-L80 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L76-L80 type ScriptSortType struct { Name string } diff --git a/typedapi/types/enums/searchtype/searchtype.go b/typedapi/types/enums/searchtype/searchtype.go index ae19c38ff3..099a75acdd 100644 --- a/typedapi/types/enums/searchtype/searchtype.go +++ b/typedapi/types/enums/searchtype/searchtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package searchtype package searchtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L275-L280 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L275-L280 type SearchType struct { Name string } diff --git a/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go b/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go index 5b4b034d9f..e075ab9315 100644 --- a/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go +++ b/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package segmentsortmissing package segmentsortmissing import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSegmentSort.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSegmentSort.ts#L43-L46 type SegmentSortMissing struct { Name string } diff --git a/typedapi/types/enums/segmentsortmode/segmentsortmode.go b/typedapi/types/enums/segmentsortmode/segmentsortmode.go index cc081ea744..fde9db7f23 100644 --- a/typedapi/types/enums/segmentsortmode/segmentsortmode.go +++ b/typedapi/types/enums/segmentsortmode/segmentsortmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package segmentsortmode package segmentsortmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSegmentSort.ts#L36-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSegmentSort.ts#L36-L41 type SegmentSortMode struct { Name string } diff --git a/typedapi/types/enums/segmentsortorder/segmentsortorder.go b/typedapi/types/enums/segmentsortorder/segmentsortorder.go index 23d2ce4663..3d7ac559b1 100644 --- a/typedapi/types/enums/segmentsortorder/segmentsortorder.go +++ b/typedapi/types/enums/segmentsortorder/segmentsortorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package segmentsortorder package segmentsortorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSegmentSort.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSegmentSort.ts#L29-L34 type SegmentSortOrder struct { Name string } diff --git a/typedapi/types/enums/shapetype/shapetype.go b/typedapi/types/enums/shapetype/shapetype.go index d649ff3c45..d6f65f72d2 100644 --- a/typedapi/types/enums/shapetype/shapetype.go +++ b/typedapi/types/enums/shapetype/shapetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shapetype package shapetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1070-L1073 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1070-L1073 type ShapeType struct { Name string } diff --git a/typedapi/types/enums/shardroutingstate/shardroutingstate.go b/typedapi/types/enums/shardroutingstate/shardroutingstate.go index 4fa5853c97..b31958475e 100644 --- a/typedapi/types/enums/shardroutingstate/shardroutingstate.go +++ b/typedapi/types/enums/shardroutingstate/shardroutingstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shardroutingstate package shardroutingstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L169-L174 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L169-L174 type ShardRoutingState struct { Name string } diff --git a/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go b/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go index 19b249d619..2e9955bce3 100644 --- a/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go +++ b/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shardsstatsstage package shardsstatsstage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotShardsStatsStage.ts#L20-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotShardsStatsStage.ts#L20-L31 type ShardsStatsStage struct { Name string } diff --git a/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go b/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go index 880a50a5e8..a6b07bbd87 100644 --- a/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go +++ b/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shardstoreallocation package shardstoreallocation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L45-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L45-L49 type ShardStoreAllocation struct { Name string } diff --git a/typedapi/types/enums/shardstorestatus/shardstorestatus.go b/typedapi/types/enums/shardstorestatus/shardstorestatus.go index ce50d92678..18ca2db770 100644 --- a/typedapi/types/enums/shardstorestatus/shardstorestatus.go +++ b/typedapi/types/enums/shardstorestatus/shardstorestatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shardstorestatus package shardstorestatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L60-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L60-L69 type ShardStoreStatus struct { Name string } diff --git a/typedapi/types/enums/shutdownstatus/shutdownstatus.go b/typedapi/types/enums/shutdownstatus/shutdownstatus.go index a229d108bd..542c7da9b8 100644 --- a/typedapi/types/enums/shutdownstatus/shutdownstatus.go +++ b/typedapi/types/enums/shutdownstatus/shutdownstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shutdownstatus package shutdownstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L45-L50 type ShutdownStatus struct { Name string } diff --git a/typedapi/types/enums/shutdowntype/shutdowntype.go b/typedapi/types/enums/shutdowntype/shutdowntype.go index 1bc3d2c82d..498b961eb3 100644 --- a/typedapi/types/enums/shutdowntype/shutdowntype.go +++ b/typedapi/types/enums/shutdowntype/shutdowntype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package shutdowntype package shutdowntype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L40-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L40-L43 type ShutdownType struct { Name string } diff --git a/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go b/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go index 11e3fb3ea6..6a24fc16c4 100644 --- a/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go +++ b/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package simplequerystringflag package simplequerystringflag import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L708-L763 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L708-L763 type SimpleQueryStringFlag struct { Name string } diff --git a/typedapi/types/enums/slicescalculation/slicescalculation.go b/typedapi/types/enums/slicescalculation/slicescalculation.go index 4b760f2ba3..0ce275433a 100644 --- a/typedapi/types/enums/slicescalculation/slicescalculation.go +++ b/typedapi/types/enums/slicescalculation/slicescalculation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package slicescalculation package slicescalculation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L368-L376 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L368-L376 type SlicesCalculation struct { Name string } diff --git a/typedapi/types/enums/snapshotsort/snapshotsort.go b/typedapi/types/enums/snapshotsort/snapshotsort.go index 1d60282b21..83bb25eb96 100644 --- a/typedapi/types/enums/snapshotsort/snapshotsort.go +++ b/typedapi/types/enums/snapshotsort/snapshotsort.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package snapshotsort package snapshotsort import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotInfo.ts#L73-L93 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotInfo.ts#L73-L93 type SnapshotSort struct { Name string } diff --git a/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go b/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go index eba8ff5eb7..6874b52b53 100644 --- a/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go +++ b/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package snapshotupgradestate package snapshotupgradestate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Model.ts#L94-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Model.ts#L94-L99 type SnapshotUpgradeState struct { Name string } diff --git a/typedapi/types/enums/snowballlanguage/snowballlanguage.go b/typedapi/types/enums/snowballlanguage/snowballlanguage.go index e8164fb84d..f3d9f60eb4 100644 --- a/typedapi/types/enums/snowballlanguage/snowballlanguage.go +++ b/typedapi/types/enums/snowballlanguage/snowballlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package snowballlanguage package snowballlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/languages.ts#L57-L80 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/languages.ts#L57-L80 type SnowballLanguage struct { Name string } diff --git a/typedapi/types/enums/sortmode/sortmode.go b/typedapi/types/enums/sortmode/sortmode.go index c71ec671b8..b00c252ca0 100644 --- a/typedapi/types/enums/sortmode/sortmode.go +++ b/typedapi/types/enums/sortmode/sortmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package sortmode package sortmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L103-L112 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L103-L112 type SortMode struct { Name string } diff --git a/typedapi/types/enums/sortorder/sortorder.go b/typedapi/types/enums/sortorder/sortorder.go index 5d0dfb16ff..6d9fa06d1d 100644 --- a/typedapi/types/enums/sortorder/sortorder.go +++ b/typedapi/types/enums/sortorder/sortorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package sortorder package sortorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L114-L123 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L114-L123 type SortOrder struct { Name string } diff --git a/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go b/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go index f717042b55..59b0b5b3b9 100644 --- a/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go +++ b/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package sourcefieldmode package sourcefieldmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L67-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L67-L75 type SourceFieldMode struct { Name string } diff --git a/typedapi/types/enums/statslevel/statslevel.go b/typedapi/types/enums/statslevel/statslevel.go index 83dde2a92f..0b81d7836a 100644 --- a/typedapi/types/enums/statslevel/statslevel.go +++ b/typedapi/types/enums/statslevel/statslevel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package statslevel package statslevel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/_types/stats.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/_types/stats.ts#L20-L24 type StatsLevel struct { Name string } diff --git a/typedapi/types/enums/storagetype/storagetype.go b/typedapi/types/enums/storagetype/storagetype.go index 3a36ee162f..f3fe4603a2 100644 --- a/typedapi/types/enums/storagetype/storagetype.go +++ b/typedapi/types/enums/storagetype/storagetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package storagetype package storagetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L505-L535 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L509-L539 type StorageType struct { Name string } diff --git a/typedapi/types/enums/stringdistance/stringdistance.go b/typedapi/types/enums/stringdistance/stringdistance.go index 49ace294b3..943e284332 100644 --- a/typedapi/types/enums/stringdistance/stringdistance.go +++ b/typedapi/types/enums/stringdistance/stringdistance.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package stringdistance package stringdistance import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L469-L490 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L469-L490 type StringDistance struct { Name string } diff --git a/typedapi/types/enums/suggestmode/suggestmode.go b/typedapi/types/enums/suggestmode/suggestmode.go index e07e4a512e..2ee7c33084 100644 --- a/typedapi/types/enums/suggestmode/suggestmode.go +++ b/typedapi/types/enums/suggestmode/suggestmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package suggestmode package suggestmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L282-L295 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L282-L295 type SuggestMode struct { Name string } diff --git a/typedapi/types/enums/suggestsort/suggestsort.go b/typedapi/types/enums/suggestsort/suggestsort.go index 15e1ef04e5..05c0a481ca 100644 --- a/typedapi/types/enums/suggestsort/suggestsort.go +++ b/typedapi/types/enums/suggestsort/suggestsort.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package suggestsort package suggestsort import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L492-L501 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L492-L501 type SuggestSort struct { Name string } diff --git a/typedapi/types/enums/synonymformat/synonymformat.go b/typedapi/types/enums/synonymformat/synonymformat.go index 50beee3cce..89e4b9cd17 100644 --- a/typedapi/types/enums/synonymformat/synonymformat.go +++ b/typedapi/types/enums/synonymformat/synonymformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package synonymformat package synonymformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L105-L108 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L105-L108 type SynonymFormat struct { Name string } diff --git a/typedapi/types/enums/tasktype/tasktype.go b/typedapi/types/enums/tasktype/tasktype.go index 008a687576..908e1e243d 100644 --- a/typedapi/types/enums/tasktype/tasktype.go +++ b/typedapi/types/enums/tasktype/tasktype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package tasktype package tasktype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/TaskType.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/TaskType.ts#L20-L26 type TaskType struct { Name string } diff --git a/typedapi/types/enums/templateformat/templateformat.go b/typedapi/types/enums/templateformat/templateformat.go index 146ec0a62a..2b870b9242 100644 --- a/typedapi/types/enums/templateformat/templateformat.go +++ b/typedapi/types/enums/templateformat/templateformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package templateformat package templateformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleTemplate.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleTemplate.ts#L22-L25 type TemplateFormat struct { Name string } diff --git a/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go b/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go index 07541cf3e8..c32cb8ea6b 100644 --- a/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go +++ b/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package termsaggregationcollectmode package termsaggregationcollectmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L980-L989 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L980-L989 type TermsAggregationCollectMode struct { Name string } diff --git a/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go b/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go index 3a205e2fc9..802f39147d 100644 --- a/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go +++ b/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package termsaggregationexecutionhint package termsaggregationexecutionhint import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L991-L996 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L991-L996 type TermsAggregationExecutionHint struct { Name string } diff --git a/typedapi/types/enums/termvectoroption/termvectoroption.go b/typedapi/types/enums/termvectoroption/termvectoroption.go index af78b7dcda..a9fc7aa46f 100644 --- a/typedapi/types/enums/termvectoroption/termvectoroption.go +++ b/typedapi/types/enums/termvectoroption/termvectoroption.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package termvectoroption package termvectoroption import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/TermVectorOption.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/TermVectorOption.ts#L20-L28 type TermVectorOption struct { Name string } diff --git a/typedapi/types/enums/textquerytype/textquerytype.go b/typedapi/types/enums/textquerytype/textquerytype.go index 6d8a1712a1..47198042df 100644 --- a/typedapi/types/enums/textquerytype/textquerytype.go +++ b/typedapi/types/enums/textquerytype/textquerytype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package textquerytype package textquerytype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L541-L567 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L541-L567 type TextQueryType struct { Name string } diff --git a/typedapi/types/enums/threadtype/threadtype.go b/typedapi/types/enums/threadtype/threadtype.go index 3594fd753c..3c3ff18c9d 100644 --- a/typedapi/types/enums/threadtype/threadtype.go +++ b/typedapi/types/enums/threadtype/threadtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package threadtype package threadtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L297-L303 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L297-L303 type ThreadType struct { Name string } diff --git a/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go b/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go index 88a58a8229..8fa64227bd 100644 --- a/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go +++ b/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package timeseriesmetrictype package timeseriesmetrictype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/TimeSeriesMetricType.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/TimeSeriesMetricType.ts#L20-L26 type TimeSeriesMetricType struct { Name string } diff --git a/typedapi/types/enums/timeunit/timeunit.go b/typedapi/types/enums/timeunit/timeunit.go index 07d325ce07..830ee586c2 100644 --- a/typedapi/types/enums/timeunit/timeunit.go +++ b/typedapi/types/enums/timeunit/timeunit.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package timeunit package timeunit import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L69-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L69-L77 type TimeUnit struct { Name string } diff --git a/typedapi/types/enums/tokenchar/tokenchar.go b/typedapi/types/enums/tokenchar/tokenchar.go index 14e7bd498e..22092dc549 100644 --- a/typedapi/types/enums/tokenchar/tokenchar.go +++ b/typedapi/types/enums/tokenchar/tokenchar.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package tokenchar package tokenchar import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L47-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L47-L54 type TokenChar struct { Name string } diff --git a/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go b/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go index 14e84cccab..c405669f9d 100644 --- a/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go +++ b/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package tokenizationtruncate package tokenizationtruncate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L350-L354 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L350-L354 type TokenizationTruncate struct { Name string } diff --git a/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go b/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go index 71fb1331fd..b70fd81bd7 100644 --- a/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go +++ b/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package totalhitsrelation package totalhitsrelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L99-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L99-L104 type TotalHitsRelation struct { Name string } diff --git a/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go b/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go index 2584334c84..41540d8bc0 100644 --- a/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go +++ b/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package trainedmodeltype package trainedmodeltype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L258-L272 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L258-L272 type TrainedModelType struct { Name string } diff --git a/typedapi/types/enums/trainingpriority/trainingpriority.go b/typedapi/types/enums/trainingpriority/trainingpriority.go index da68fc62f3..32a9b3580c 100644 --- a/typedapi/types/enums/trainingpriority/trainingpriority.go +++ b/typedapi/types/enums/trainingpriority/trainingpriority.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package trainingpriority package trainingpriority import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L311-L314 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L311-L314 type TrainingPriority struct { Name string } diff --git a/typedapi/types/enums/translogdurability/translogdurability.go b/typedapi/types/enums/translogdurability/translogdurability.go index 14f4af7efe..1b524cd0fe 100644 --- a/typedapi/types/enums/translogdurability/translogdurability.go +++ b/typedapi/types/enums/translogdurability/translogdurability.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package translogdurability package translogdurability import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L359-L374 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L363-L378 type TranslogDurability struct { Name string } diff --git a/typedapi/types/enums/ttesttype/ttesttype.go b/typedapi/types/enums/ttesttype/ttesttype.go index 0ec78fd680..e6a4c89707 100644 --- a/typedapi/types/enums/ttesttype/ttesttype.go +++ b/typedapi/types/enums/ttesttype/ttesttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package ttesttype package ttesttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L322-L335 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L322-L335 type TTestType struct { Name string } diff --git a/typedapi/types/enums/type_/type_.go b/typedapi/types/enums/type_/type_.go index 0c45d13f48..63adfc48ce 100644 --- a/typedapi/types/enums/type_/type_.go +++ b/typedapi/types/enums/type_/type_.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package type_ package type_ import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/_types/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/_types/types.ts#L20-L24 type Type struct { Name string } diff --git a/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go b/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go index 9f5b423699..b112a4efcc 100644 --- a/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go +++ b/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package unassignedinformationreason package unassignedinformationreason import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L127-L146 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L127-L146 type UnassignedInformationReason struct { Name string } diff --git a/typedapi/types/enums/useragentproperty/useragentproperty.go b/typedapi/types/enums/useragentproperty/useragentproperty.go index 08ec0a386e..01d3f9627d 100644 --- a/typedapi/types/enums/useragentproperty/useragentproperty.go +++ b/typedapi/types/enums/useragentproperty/useragentproperty.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package useragentproperty package useragentproperty import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L266-L277 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L266-L277 type UserAgentProperty struct { Name string } diff --git a/typedapi/types/enums/valuetype/valuetype.go b/typedapi/types/enums/valuetype/valuetype.go index 26edcdecf1..4083b026d3 100644 --- a/typedapi/types/enums/valuetype/valuetype.go +++ b/typedapi/types/enums/valuetype/valuetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package valuetype package valuetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L419-L430 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L419-L430 type ValueType struct { Name string } diff --git a/typedapi/types/enums/versiontype/versiontype.go b/typedapi/types/enums/versiontype/versiontype.go index 53d497ecb2..191d337d45 100644 --- a/typedapi/types/enums/versiontype/versiontype.go +++ b/typedapi/types/enums/versiontype/versiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package versiontype package versiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L106-L122 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L106-L122 type VersionType struct { Name string } diff --git a/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go b/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go index b41e605f7e..320658c56c 100644 --- a/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go +++ b/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package waitforactiveshardoptions package waitforactiveshardoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L305-L309 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L305-L309 type WaitForActiveShardOptions struct { Name string } diff --git a/typedapi/types/enums/waitforevents/waitforevents.go b/typedapi/types/enums/waitforevents/waitforevents.go index f5caa80fe2..3a6dce4c46 100644 --- a/typedapi/types/enums/waitforevents/waitforevents.go +++ b/typedapi/types/enums/waitforevents/waitforevents.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package waitforevents package waitforevents import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L311-L318 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L311-L318 type WaitForEvents struct { Name string } diff --git a/typedapi/types/enums/watchermetric/watchermetric.go b/typedapi/types/enums/watchermetric/watchermetric.go index 479e4c4929..53b86f0919 100644 --- a/typedapi/types/enums/watchermetric/watchermetric.go +++ b/typedapi/types/enums/watchermetric/watchermetric.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package watchermetric package watchermetric import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stats/types.ts#L42-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stats/types.ts#L42-L48 type WatcherMetric struct { Name string } diff --git a/typedapi/types/enums/watcherstate/watcherstate.go b/typedapi/types/enums/watcherstate/watcherstate.go index a2a5b1e953..4453542b3e 100644 --- a/typedapi/types/enums/watcherstate/watcherstate.go +++ b/typedapi/types/enums/watcherstate/watcherstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package watcherstate package watcherstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stats/types.ts#L26-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stats/types.ts#L26-L31 type WatcherState struct { Name string } diff --git a/typedapi/types/enums/zerotermsquery/zerotermsquery.go b/typedapi/types/enums/zerotermsquery/zerotermsquery.go index 50ed44cfec..ddf5d102d3 100644 --- a/typedapi/types/enums/zerotermsquery/zerotermsquery.go +++ b/typedapi/types/enums/zerotermsquery/zerotermsquery.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Package zerotermsquery package zerotermsquery import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L569-L578 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L569-L578 type ZeroTermsQuery struct { Name string } diff --git a/typedapi/types/epochtimeunitmillis.go b/typedapi/types/epochtimeunitmillis.go index 0f88d25795..5d209418da 100644 --- a/typedapi/types/epochtimeunitmillis.go +++ b/typedapi/types/epochtimeunitmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EpochTimeUnitMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L40-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L40-L40 type EpochTimeUnitMillis int64 diff --git a/typedapi/types/epochtimeunitseconds.go b/typedapi/types/epochtimeunitseconds.go index 175ecf0276..40a87c3b36 100644 --- a/typedapi/types/epochtimeunitseconds.go +++ b/typedapi/types/epochtimeunitseconds.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EpochTimeUnitSeconds type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Time.ts#L40-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Time.ts#L40-L40 type EpochTimeUnitSeconds int64 diff --git a/typedapi/types/eql.go b/typedapi/types/eql.go index 4289aea9ad..896a5a6a7a 100644 --- a/typedapi/types/eql.go +++ b/typedapi/types/eql.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Eql type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L351-L354 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L351-L354 type Eql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -60,7 +61,7 @@ func (s *Eql) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -74,7 +75,7 @@ func (s *Eql) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -83,7 +84,7 @@ func (s *Eql) UnmarshalJSON(data []byte) error { case "features": if err := dec.Decode(&s.Features); err != nil { - return err + return fmt.Errorf("%s | %w", "Features", err) } case "queries": @@ -91,7 +92,7 @@ func (s *Eql) UnmarshalJSON(data []byte) error { s.Queries = make(map[string]XpackQuery, 0) } if err := dec.Decode(&s.Queries); err != nil { - return err + return fmt.Errorf("%s | %w", "Queries", err) } } diff --git a/typedapi/types/eqlfeatures.go b/typedapi/types/eqlfeatures.go index 88f6666c70..2e1ac28b87 100644 --- a/typedapi/types/eqlfeatures.go +++ b/typedapi/types/eqlfeatures.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EqlFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L99-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L99-L107 type EqlFeatures struct { Event uint `json:"event"` Join uint `json:"join"` diff --git a/typedapi/types/eqlfeaturesjoin.go b/typedapi/types/eqlfeaturesjoin.go index af78f9d148..0611080122 100644 --- a/typedapi/types/eqlfeaturesjoin.go +++ b/typedapi/types/eqlfeaturesjoin.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EqlFeaturesJoin type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L109-L115 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L109-L115 type EqlFeaturesJoin struct { JoinQueriesFiveOrMore uint `json:"join_queries_five_or_more"` JoinQueriesFour uint `json:"join_queries_four"` diff --git a/typedapi/types/eqlfeatureskeys.go b/typedapi/types/eqlfeatureskeys.go index 2eee9c0f24..1dd74002e6 100644 --- a/typedapi/types/eqlfeatureskeys.go +++ b/typedapi/types/eqlfeatureskeys.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EqlFeaturesKeys type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L117-L123 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L117-L123 type EqlFeaturesKeys struct { JoinKeysFiveOrMore uint `json:"join_keys_five_or_more"` JoinKeysFour uint `json:"join_keys_four"` diff --git a/typedapi/types/eqlfeaturespipes.go b/typedapi/types/eqlfeaturespipes.go index 6f62a4bf62..26c7420c55 100644 --- a/typedapi/types/eqlfeaturespipes.go +++ b/typedapi/types/eqlfeaturespipes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EqlFeaturesPipes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L125-L128 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L125-L128 type EqlFeaturesPipes struct { PipeHead uint `json:"pipe_head"` PipeTail uint `json:"pipe_tail"` diff --git a/typedapi/types/eqlfeaturessequences.go b/typedapi/types/eqlfeaturessequences.go index dbf6b67e48..3484fcac4d 100644 --- a/typedapi/types/eqlfeaturessequences.go +++ b/typedapi/types/eqlfeaturessequences.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EqlFeaturesSequences type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L130-L137 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L130-L137 type EqlFeaturesSequences struct { SequenceMaxspan uint `json:"sequence_maxspan"` SequenceQueriesFiveOrMore uint `json:"sequence_queries_five_or_more"` diff --git a/typedapi/types/eqlhits.go b/typedapi/types/eqlhits.go index 644a2b501d..28f49fc6b9 100644 --- a/typedapi/types/eqlhits.go +++ b/typedapi/types/eqlhits.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EqlHits type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/_types/EqlHits.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/_types/EqlHits.ts#L25-L39 type EqlHits struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events,omitempty"` diff --git a/typedapi/types/errorcause.go b/typedapi/types/errorcause.go index ba0834202f..5ea7dd6ba6 100644 --- a/typedapi/types/errorcause.go +++ b/typedapi/types/errorcause.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // ErrorCause type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Errors.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Errors.ts#L25-L48 type ErrorCause struct { CausedBy *ErrorCause `json:"caused_by,omitempty"` Metadata map[string]json.RawMessage `json:"-"` @@ -69,13 +69,13 @@ func (s *ErrorCause) UnmarshalJSON(data []byte) error { case "caused_by": if err := dec.Decode(&s.CausedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "CausedBy", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,13 +86,13 @@ func (s *ErrorCause) UnmarshalJSON(data []byte) error { case "root_cause": if err := dec.Decode(&s.RootCause); err != nil { - return err + return fmt.Errorf("%s | %w", "RootCause", err) } case "stack_trace": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StackTrace", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,13 +103,13 @@ func (s *ErrorCause) UnmarshalJSON(data []byte) error { case "suppressed": if err := dec.Decode(&s.Suppressed); err != nil { - return err + return fmt.Errorf("%s | %w", "Suppressed", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +126,7 @@ func (s *ErrorCause) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } s.Metadata[key] = *raw } diff --git a/typedapi/types/errorresponsebase.go b/typedapi/types/errorresponsebase.go index 1d15733304..da69abd810 100644 --- a/typedapi/types/errorresponsebase.go +++ b/typedapi/types/errorresponsebase.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ErrorResponseBase type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Base.ts#L66-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Base.ts#L76-L85 type ErrorResponseBase struct { Error ErrorCause `json:"error"` Status int `json:"status"` @@ -53,7 +54,7 @@ func (s *ErrorResponseBase) UnmarshalJSON(data []byte) error { case "error": if err := dec.Decode(&s.Error); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } case "status": @@ -64,7 +65,7 @@ func (s *ErrorResponseBase) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } s.Status = value case float64: diff --git a/typedapi/types/esqlcolumns.go b/typedapi/types/esqlcolumns.go index 036eac4837..c21cdf205d 100644 --- a/typedapi/types/esqlcolumns.go +++ b/typedapi/types/esqlcolumns.go @@ -16,11 +16,11 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // EsqlColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Binary.ts#L24-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Binary.ts#L24-L24 type EsqlColumns []byte diff --git a/typedapi/types/eventdatastream.go b/typedapi/types/eventdatastream.go index 35b24777e1..77986837bf 100644 --- a/typedapi/types/eventdatastream.go +++ b/typedapi/types/eventdatastream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // EventDataStream type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/_types/BehavioralAnalytics.ts#L29-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/_types/BehavioralAnalytics.ts#L29-L31 type EventDataStream struct { Name string `json:"name"` } @@ -51,7 +52,7 @@ func (s *EventDataStream) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/ewmamodelsettings.go b/typedapi/types/ewmamodelsettings.go index aceff897ca..a22949a5e3 100644 --- a/typedapi/types/ewmamodelsettings.go +++ b/typedapi/types/ewmamodelsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // EwmaModelSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L267-L269 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L267-L269 type EwmaModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` } @@ -57,7 +58,7 @@ func (s *EwmaModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := float32(value) s.Alpha = &f diff --git a/typedapi/types/ewmamovingaverageaggregation.go b/typedapi/types/ewmamovingaverageaggregation.go index d71fb1e2de..f91df63f0b 100644 --- a/typedapi/types/ewmamovingaverageaggregation.go +++ b/typedapi/types/ewmamovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // EwmaMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L252-L255 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L252-L255 type EwmaMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -68,13 +69,13 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,12 +86,12 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "minimize": @@ -100,7 +101,7 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Minimize", err) } s.Minimize = &value case bool: @@ -109,13 +110,13 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "model": if err := dec.Decode(&s.Model); err != nil { - return err + return fmt.Errorf("%s | %w", "Model", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,7 +133,7 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Predict", err) } s.Predict = &value case float64: @@ -142,7 +143,7 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "window": @@ -153,7 +154,7 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/executeenrichpolicystatus.go b/typedapi/types/executeenrichpolicystatus.go index 72a75231d6..20dbd350bc 100644 --- a/typedapi/types/executeenrichpolicystatus.go +++ b/typedapi/types/executeenrichpolicystatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // ExecuteEnrichPolicyStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/execute_policy/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/execute_policy/types.ts#L20-L22 type ExecuteEnrichPolicyStatus struct { Phase enrichpolicyphase.EnrichPolicyPhase `json:"phase"` } diff --git a/typedapi/types/executingpolicy.go b/typedapi/types/executingpolicy.go index 0b8f3031ed..7c4015730e 100644 --- a/typedapi/types/executingpolicy.go +++ b/typedapi/types/executingpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ExecutingPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/stats/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/stats/types.ts#L24-L27 type ExecutingPolicy struct { Name string `json:"name"` Task TaskInfo `json:"task"` @@ -52,12 +53,12 @@ func (s *ExecutingPolicy) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "task": if err := dec.Decode(&s.Task); err != nil { - return err + return fmt.Errorf("%s | %w", "Task", err) } } diff --git a/typedapi/types/executionresult.go b/typedapi/types/executionresult.go index 06346678e3..1e777d7f77 100644 --- a/typedapi/types/executionresult.go +++ b/typedapi/types/executionresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ExecutionResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L60-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L60-L66 type ExecutionResult struct { Actions []ExecutionResultAction `json:"actions"` Condition ExecutionResultCondition `json:"condition"` @@ -55,27 +56,27 @@ func (s *ExecutionResult) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "condition": if err := dec.Decode(&s.Condition); err != nil { - return err + return fmt.Errorf("%s | %w", "Condition", err) } case "execution_duration": if err := dec.Decode(&s.ExecutionDuration); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionDuration", err) } case "execution_time": if err := dec.Decode(&s.ExecutionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionTime", err) } case "input": if err := dec.Decode(&s.Input); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } } diff --git a/typedapi/types/executionresultaction.go b/typedapi/types/executionresultaction.go index 462be690b6..46760f443e 100644 --- a/typedapi/types/executionresultaction.go +++ b/typedapi/types/executionresultaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // ExecutionResultAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L74-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L74-L86 type ExecutionResultAction struct { Email *EmailResult `json:"email,omitempty"` Error *ErrorCause `json:"error,omitempty"` @@ -65,38 +66,38 @@ func (s *ExecutionResultAction) UnmarshalJSON(data []byte) error { case "email": if err := dec.Decode(&s.Email); err != nil { - return err + return fmt.Errorf("%s | %w", "Email", err) } case "error": if err := dec.Decode(&s.Error); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "logging": if err := dec.Decode(&s.Logging); err != nil { - return err + return fmt.Errorf("%s | %w", "Logging", err) } case "pagerduty": if err := dec.Decode(&s.Pagerduty); err != nil { - return err + return fmt.Errorf("%s | %w", "Pagerduty", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,22 +108,22 @@ func (s *ExecutionResultAction) UnmarshalJSON(data []byte) error { case "slack": if err := dec.Decode(&s.Slack); err != nil { - return err + return fmt.Errorf("%s | %w", "Slack", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "webhook": if err := dec.Decode(&s.Webhook); err != nil { - return err + return fmt.Errorf("%s | %w", "Webhook", err) } } diff --git a/typedapi/types/executionresultcondition.go b/typedapi/types/executionresultcondition.go index ca780a6824..4ce7054e4b 100644 --- a/typedapi/types/executionresultcondition.go +++ b/typedapi/types/executionresultcondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // ExecutionResultCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L68-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L68-L72 type ExecutionResultCondition struct { Met bool `json:"met"` Status actionstatusoptions.ActionStatusOptions `json:"status"` @@ -62,7 +63,7 @@ func (s *ExecutionResultCondition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Met", err) } s.Met = value case bool: @@ -71,12 +72,12 @@ func (s *ExecutionResultCondition) UnmarshalJSON(data []byte) error { case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/executionresultinput.go b/typedapi/types/executionresultinput.go index 09d4892f8c..e40d3ba88a 100644 --- a/typedapi/types/executionresultinput.go +++ b/typedapi/types/executionresultinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,7 +29,7 @@ import ( // ExecutionResultInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L88-L92 type ExecutionResultInput struct { Payload map[string]json.RawMessage `json:"payload"` Status actionstatusoptions.ActionStatusOptions `json:"status"` diff --git a/typedapi/types/executionstate.go b/typedapi/types/executionstate.go index deafda3201..bd490927f9 100644 --- a/typedapi/types/executionstate.go +++ b/typedapi/types/executionstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExecutionState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L120-L124 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L120-L124 type ExecutionState struct { Reason *string `json:"reason,omitempty"` Successful bool `json:"successful"` @@ -55,7 +56,7 @@ func (s *ExecutionState) UnmarshalJSON(data []byte) error { case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *ExecutionState) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Successful", err) } s.Successful = value case bool: @@ -80,7 +81,7 @@ func (s *ExecutionState) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/executionthreadpool.go b/typedapi/types/executionthreadpool.go index 3ef32f5c4e..4ec8d5dec6 100644 --- a/typedapi/types/executionthreadpool.go +++ b/typedapi/types/executionthreadpool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExecutionThreadPool type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Execution.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Execution.ts#L94-L97 type ExecutionThreadPool struct { MaxSize int64 `json:"max_size"` QueueSize int64 `json:"queue_size"` @@ -58,7 +59,7 @@ func (s *ExecutionThreadPool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSize", err) } s.MaxSize = value case float64: @@ -73,7 +74,7 @@ func (s *ExecutionThreadPool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueSize", err) } s.QueueSize = value case float64: diff --git a/typedapi/types/existsquery.go b/typedapi/types/existsquery.go index 7620d0bc62..5e317fee52 100644 --- a/typedapi/types/existsquery.go +++ b/typedapi/types/existsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExistsQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L36-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L36-L41 type ExistsQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -65,7 +66,7 @@ func (s *ExistsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -76,13 +77,13 @@ func (s *ExistsQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/expandwildcards.go b/typedapi/types/expandwildcards.go index 35f52711e9..e0a5834f1d 100644 --- a/typedapi/types/expandwildcards.go +++ b/typedapi/types/expandwildcards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // ExpandWildcards type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L217-L217 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L217-L217 type ExpandWildcards []expandwildcard.ExpandWildcard diff --git a/typedapi/types/explainanalyzetoken.go b/typedapi/types/explainanalyzetoken.go index bd0afb2b8a..e4f5e31410 100644 --- a/typedapi/types/explainanalyzetoken.go +++ b/typedapi/types/explainanalyzetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // ExplainAnalyzeToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L52-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L52-L64 type ExplainAnalyzeToken struct { Bytes string `json:"bytes"` EndOffset int64 `json:"end_offset"` @@ -63,7 +63,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case "bytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Bytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +79,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EndOffset", err) } s.EndOffset = value case float64: @@ -94,7 +94,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyword", err) } s.Keyword = &value case bool: @@ -108,7 +108,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Position", err) } s.Position = value case float64: @@ -123,7 +123,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositionLength", err) } s.PositionLength = value case float64: @@ -138,7 +138,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StartOffset", err) } s.StartOffset = value case float64: @@ -153,7 +153,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TermFrequency", err) } s.TermFrequency = value case float64: @@ -164,7 +164,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case "token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Token", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -176,7 +176,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -193,7 +193,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "ExplainAnalyzeToken", err) } s.ExplainAnalyzeToken[key] = *raw } diff --git a/typedapi/types/explanation.go b/typedapi/types/explanation.go index 0dd4035809..1058ede2f2 100644 --- a/typedapi/types/explanation.go +++ b/typedapi/types/explanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Explanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/explain/types.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/explain/types.ts#L22-L26 type Explanation struct { Description string `json:"description"` Details []ExplanationDetail `json:"details"` @@ -55,7 +56,7 @@ func (s *Explanation) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *Explanation) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "value": @@ -76,7 +77,7 @@ func (s *Explanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := float32(value) s.Value = f diff --git a/typedapi/types/explanationdetail.go b/typedapi/types/explanationdetail.go index 8e2229be20..8f00fb993f 100644 --- a/typedapi/types/explanationdetail.go +++ b/typedapi/types/explanationdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExplanationDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/explain/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/explain/types.ts#L28-L32 type ExplanationDetail struct { Description string `json:"description"` Details []ExplanationDetail `json:"details,omitempty"` @@ -55,7 +56,7 @@ func (s *ExplanationDetail) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *ExplanationDetail) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "value": @@ -76,7 +77,7 @@ func (s *ExplanationDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := float32(value) s.Value = f diff --git a/typedapi/types/explorecontrols.go b/typedapi/types/explorecontrols.go index ab6783c46a..532b224c0e 100644 --- a/typedapi/types/explorecontrols.go +++ b/typedapi/types/explorecontrols.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExploreControls type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/ExploreControls.ts#L24-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/ExploreControls.ts#L24-L49 type ExploreControls struct { // SampleDiversity To avoid the top-matching documents sample being dominated by a single source // of results, it is sometimes necessary to request diversity in the sample. @@ -73,7 +74,7 @@ func (s *ExploreControls) UnmarshalJSON(data []byte) error { case "sample_diversity": if err := dec.Decode(&s.SampleDiversity); err != nil { - return err + return fmt.Errorf("%s | %w", "SampleDiversity", err) } case "sample_size": @@ -84,7 +85,7 @@ func (s *ExploreControls) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SampleSize", err) } s.SampleSize = &value case float64: @@ -94,7 +95,7 @@ func (s *ExploreControls) UnmarshalJSON(data []byte) error { case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } case "use_significance": @@ -104,7 +105,7 @@ func (s *ExploreControls) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UseSignificance", err) } s.UseSignificance = value case bool: diff --git a/typedapi/types/extendedboundsdouble.go b/typedapi/types/extendedboundsdouble.go index 1287e1f6ed..1fd2ceb0bf 100644 --- a/typedapi/types/extendedboundsdouble.go +++ b/typedapi/types/extendedboundsdouble.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExtendedBoundsdouble type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L489-L498 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L489-L498 type ExtendedBoundsdouble struct { // Max Maximum value for the bound. 
Max Float64 `json:"max"` @@ -60,7 +61,7 @@ func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) s.Max = f @@ -76,7 +77,7 @@ func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) s.Min = f diff --git a/typedapi/types/extendedboundsfielddatemath.go b/typedapi/types/extendedboundsfielddatemath.go index 94ff112ea5..4ff35acdac 100644 --- a/typedapi/types/extendedboundsfielddatemath.go +++ b/typedapi/types/extendedboundsfielddatemath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ExtendedBoundsFieldDateMath type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L489-L498 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L489-L498 type ExtendedBoundsFieldDateMath struct { // Max Maximum value for the bound. Max FieldDateMath `json:"max"` @@ -54,12 +55,12 @@ func (s *ExtendedBoundsFieldDateMath) UnmarshalJSON(data []byte) error { case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "min": if err := dec.Decode(&s.Min); err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } } diff --git a/typedapi/types/extendedmemorystats.go b/typedapi/types/extendedmemorystats.go index 0dea2539fb..e424b9e549 100644 --- a/typedapi/types/extendedmemorystats.go +++ b/typedapi/types/extendedmemorystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExtendedMemoryStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L622-L631 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L622-L631 type ExtendedMemoryStats struct { // AdjustedTotalInBytes If the amount of physical memory has been overridden using the // `es`.`total_memory_bytes` system property then this reports the overridden @@ -77,7 +78,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AdjustedTotalInBytes", err) } s.AdjustedTotalInBytes = &value case float64: @@ -92,7 +93,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeInBytes", err) } s.FreeInBytes = &value case float64: @@ -108,7 +109,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreePercent", err) } s.FreePercent = &value case float64: @@ -119,7 +120,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case "resident": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Resident", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,7 +136,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResidentInBytes", err) } s.ResidentInBytes = &value case float64: @@ -146,7 +147,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case "share": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Share", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -162,7 +163,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShareInBytes", err) } s.ShareInBytes = &value case float64: @@ -177,7 +178,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = &value case float64: @@ -188,7 +189,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case "total_virtual": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalVirtual", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -204,7 +205,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalVirtualInBytes", err) } s.TotalVirtualInBytes = &value case float64: @@ -219,7 +220,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedInBytes", err) } s.UsedInBytes = &value case float64: @@ -235,7 +236,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedPercent", err) } s.UsedPercent = &value case float64: diff --git a/typedapi/types/extendedstatsaggregate.go b/typedapi/types/extendedstatsaggregate.go index dd60d301bc..588f4d460f 100644 --- a/typedapi/types/extendedstatsaggregate.go +++ b/typedapi/types/extendedstatsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExtendedStatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L278-L296 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L278-L296 type ExtendedStatsAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` @@ -75,13 +76,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "avg": if err := dec.Decode(&s.Avg); err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } case "avg_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,7 +98,7 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -107,13 +108,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "max_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,18 +125,18 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min": if err := dec.Decode(&s.Min); err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } case "min_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -146,13 +147,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "std_deviation": if err := dec.Decode(&s.StdDeviation); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviation", err) } case "std_deviation_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,22 +164,22 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "std_deviation_bounds": if err := dec.Decode(&s.StdDeviationBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationBounds", err) } case 
"std_deviation_bounds_as_string": if err := dec.Decode(&s.StdDeviationBoundsAsString); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationBoundsAsString", err) } case "std_deviation_population": if err := dec.Decode(&s.StdDeviationPopulation); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationPopulation", err) } case "std_deviation_sampling": if err := dec.Decode(&s.StdDeviationSampling); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationSampling", err) } case "sum": @@ -188,7 +189,7 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sum", err) } f := Float64(value) s.Sum = f @@ -200,7 +201,7 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "sum_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SumAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,13 +212,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "sum_of_squares": if err := dec.Decode(&s.SumOfSquares); err != nil { - return err + return fmt.Errorf("%s | %w", "SumOfSquares", err) } case "sum_of_squares_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SumOfSquaresAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -228,13 +229,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "variance": if err := dec.Decode(&s.Variance); err != nil { - return err + return fmt.Errorf("%s | %w", "Variance", err) } case "variance_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VarianceAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -245,13 +246,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "variance_population": if err := dec.Decode(&s.VariancePopulation); err != nil { - return err + return fmt.Errorf("%s | %w", "VariancePopulation", err) } case "variance_population_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VariancePopulationAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -262,13 +263,13 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { case "variance_sampling": if err := dec.Decode(&s.VarianceSampling); err != nil { - return err + return fmt.Errorf("%s | %w", "VarianceSampling", err) } case "variance_sampling_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VarianceSamplingAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/extendedstatsaggregation.go b/typedapi/types/extendedstatsaggregation.go index caae75dc06..a936ea4a1c 100644 --- a/typedapi/types/extendedstatsaggregation.go +++ b/typedapi/types/extendedstatsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExtendedStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L101-L106 type ExtendedStatsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -60,13 +61,13 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,13 +78,13 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -92,7 +93,7 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -101,7 +102,7 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -109,7 +110,7 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -123,7 +124,7 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sigma", err) } f := Float64(value) s.Sigma = &f diff --git a/typedapi/types/extendedstatsbucketaggregate.go b/typedapi/types/extendedstatsbucketaggregate.go index 9acfc41902..8727ab4f3e 100644 --- a/typedapi/types/extendedstatsbucketaggregate.go +++ b/typedapi/types/extendedstatsbucketaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ExtendedStatsBucketAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L298-L299 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L298-L299 type ExtendedStatsBucketAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` @@ -75,13 +76,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "avg": if err := dec.Decode(&s.Avg); err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } case "avg_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,7 +98,7 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -107,13 +108,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "max_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,18 +125,18 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min": if err := dec.Decode(&s.Min); err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } case "min_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -146,13 +147,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "std_deviation": if err := dec.Decode(&s.StdDeviation); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviation", err) } case "std_deviation_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,22 +164,22 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "std_deviation_bounds": if err := dec.Decode(&s.StdDeviationBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationBounds", err) } case "std_deviation_bounds_as_string": if err := dec.Decode(&s.StdDeviationBoundsAsString); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationBoundsAsString", err) } case "std_deviation_population": if err := dec.Decode(&s.StdDeviationPopulation); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationPopulation", err) } case "std_deviation_sampling": if err := dec.Decode(&s.StdDeviationSampling); err != nil { - return err + return fmt.Errorf("%s | %w", "StdDeviationSampling", err) } case "sum": @@ -188,7 +189,7 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sum", err) } f := Float64(value) s.Sum = f 
@@ -200,7 +201,7 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "sum_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SumAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,13 +212,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "sum_of_squares": if err := dec.Decode(&s.SumOfSquares); err != nil { - return err + return fmt.Errorf("%s | %w", "SumOfSquares", err) } case "sum_of_squares_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SumOfSquaresAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -228,13 +229,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "variance": if err := dec.Decode(&s.Variance); err != nil { - return err + return fmt.Errorf("%s | %w", "Variance", err) } case "variance_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VarianceAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -245,13 +246,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "variance_population": if err := dec.Decode(&s.VariancePopulation); err != nil { - return err + return fmt.Errorf("%s | %w", "VariancePopulation", err) } case "variance_population_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VariancePopulationAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -262,13 +263,13 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { case "variance_sampling": if err := dec.Decode(&s.VarianceSampling); err != nil { - return err + return fmt.Errorf("%s | %w", "VarianceSampling", err) } case "variance_sampling_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VarianceSamplingAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/extendedstatsbucketaggregation.go b/typedapi/types/extendedstatsbucketaggregation.go index 949f5473a8..cf80e010b1 100644 --- a/typedapi/types/extendedstatsbucketaggregation.go +++ b/typedapi/types/extendedstatsbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ExtendedStatsBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L198-L203 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L198-L203 type ExtendedStatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -65,13 +66,13 @@ func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,18 +83,18 @@ func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sigma", err) } f := Float64(value) s.Sigma = &f diff --git a/typedapi/types/failprocessor.go b/typedapi/types/failprocessor.go index 805d25a431..d7988ac500 100644 --- a/typedapi/types/failprocessor.go +++ b/typedapi/types/failprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FailProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L648-L654 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L648-L654 type FailProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -67,7 +68,7 @@ func (s *FailProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *FailProcessor) UnmarshalJSON(data []byte) error { case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *FailProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -105,7 +106,7 @@ func (s *FailProcessor) UnmarshalJSON(data []byte) error { case "message": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,13 +117,13 @@ func (s *FailProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/feature.go b/typedapi/types/feature.go index 6ee301e583..efe1c6abec 100644 --- a/typedapi/types/feature.go +++ b/typedapi/types/feature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Feature type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/features/_types/Feature.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/features/_types/Feature.ts#L20-L23 type Feature struct { Description string `json:"description"` Name string `json:"name"` @@ -54,7 +55,7 @@ func (s *Feature) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *Feature) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/features.go b/typedapi/types/features.go index b2a0eeb902..3796c5af16 100644 --- a/typedapi/types/features.go +++ b/typedapi/types/features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // Features type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get/IndicesGetRequest.ts#L95-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get/IndicesGetRequest.ts#L95-L95 type Features []feature.Feature diff --git a/typedapi/types/featuretoggle.go b/typedapi/types/featuretoggle.go index 5f1b152bdd..1d445f594a 100644 --- a/typedapi/types/featuretoggle.go +++ b/typedapi/types/featuretoggle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FeatureToggle type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L40-L42 type FeatureToggle struct { Enabled bool `json:"enabled"` } @@ -57,7 +58,7 @@ func (s *FeatureToggle) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/fetchprofile.go b/typedapi/types/fetchprofile.go index da69d67f6a..29f3c01aad 100644 --- a/typedapi/types/fetchprofile.go +++ b/typedapi/types/fetchprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FetchProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L139-L146 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L139-L146 type FetchProfile struct { Breakdown FetchProfileBreakdown `json:"breakdown"` Children []FetchProfile `json:"children,omitempty"` @@ -57,23 +58,23 @@ func (s *FetchProfile) UnmarshalJSON(data []byte) error { case "breakdown": if err := dec.Decode(&s.Breakdown); err != nil { - return err + return fmt.Errorf("%s | %w", "Breakdown", err) } case "children": if err := dec.Decode(&s.Children); err != nil { - return err + return fmt.Errorf("%s | %w", "Children", err) } case "debug": if err := dec.Decode(&s.Debug); err != nil { - return err + return fmt.Errorf("%s | %w", "Debug", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,13 +85,13 @@ func (s *FetchProfile) UnmarshalJSON(data []byte) error { case "time_in_nanos": if err := dec.Decode(&s.TimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInNanos", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/fetchprofilebreakdown.go b/typedapi/types/fetchprofilebreakdown.go index 97d5e9579b..f71a08f625 100644 --- a/typedapi/types/fetchprofilebreakdown.go +++ b/typedapi/types/fetchprofilebreakdown.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FetchProfileBreakdown type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L148-L157 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L148-L157 type FetchProfileBreakdown struct { LoadSource *int `json:"load_source,omitempty"` LoadSourceCount *int `json:"load_source_count,omitempty"` @@ -65,7 +66,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LoadSource", err) } s.LoadSource = &value case float64: @@ -81,7 +82,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LoadSourceCount", err) } s.LoadSourceCount = &value case float64: @@ -97,7 +98,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LoadStoredFields", err) } s.LoadStoredFields = &value case float64: @@ -113,7 +114,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LoadStoredFieldsCount", err) } s.LoadStoredFieldsCount = &value case float64: @@ -129,7 +130,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NextReader", err) } s.NextReader = &value case float64: @@ -145,7 +146,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NextReaderCount", err) } s.NextReaderCount = &value case float64: @@ -161,7 +162,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Process", err) } s.Process = &value case float64: @@ -177,7 +178,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessCount", err) } s.ProcessCount = &value case float64: diff --git a/typedapi/types/fetchprofiledebug.go b/typedapi/types/fetchprofiledebug.go index e000fc0d8e..2fbfc28f6e 100644 --- a/typedapi/types/fetchprofiledebug.go +++ b/typedapi/types/fetchprofiledebug.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FetchProfileDebug type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L159-L162 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L159-L162 type FetchProfileDebug struct { FastPath *int `json:"fast_path,omitempty"` StoredFields []string `json:"stored_fields,omitempty"` @@ -59,7 +60,7 @@ func (s *FetchProfileDebug) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FastPath", err) } s.FastPath = &value case float64: @@ -69,7 +70,7 @@ func (s *FetchProfileDebug) UnmarshalJSON(data []byte) error { case "stored_fields": if err := dec.Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } diff --git a/typedapi/types/fieldaliasproperty.go b/typedapi/types/fieldaliasproperty.go index 351e13e5fc..0867dc01c3 100644 --- a/typedapi/types/fieldaliasproperty.go +++ b/typedapi/types/fieldaliasproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // FieldAliasProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L49-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L49-L52 type FieldAliasProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -61,7 +62,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -379,7 +380,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -392,12 +393,12 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } case "properties": @@ -709,7 +710,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/fieldandformat.go b/typedapi/types/fieldandformat.go index 4c43ee775f..545d12f2e8 100644 --- a/typedapi/types/fieldandformat.go +++ b/typedapi/types/fieldandformat.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldAndFormat type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L505-L519 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L505-L519 type FieldAndFormat struct { // Field Wildcard pattern. The request returns values for field names matching this // pattern. @@ -66,13 +67,13 @@ func (s *FieldAndFormat) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *FieldAndFormat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeUnmapped", err) } s.IncludeUnmapped = &value case bool: diff --git a/typedapi/types/fieldcapability.go b/typedapi/types/fieldcapability.go index a71102993f..9574368699 100644 --- a/typedapi/types/fieldcapability.go +++ b/typedapi/types/fieldcapability.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // FieldCapability type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/field_caps/types.ts#L23-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/field_caps/types.ts#L23-L81 type FieldCapability struct { // Aggregatable Whether this field can be aggregated on all indices. 
Aggregatable bool `json:"aggregatable"` @@ -90,7 +91,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregatable", err) } s.Aggregatable = value case bool: @@ -103,19 +104,19 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "metadata_field": @@ -125,7 +126,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MetadataField", err) } s.MetadataField = &value case bool: @@ -134,7 +135,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { case "metric_conflicts_indices": if err := dec.Decode(&s.MetricConflictsIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "MetricConflictsIndices", err) } case "non_aggregatable_indices": @@ -143,19 +144,19 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "NonAggregatableIndices", err) } s.NonAggregatableIndices = append(s.NonAggregatableIndices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.NonAggregatableIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "NonAggregatableIndices", err) } } case "non_dimension_indices": if err := dec.Decode(&s.NonDimensionIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "NonDimensionIndices", err) } case "non_searchable_indices": @@ -164,13 +165,13 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "NonSearchableIndices", err) } s.NonSearchableIndices = append(s.NonSearchableIndices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.NonSearchableIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "NonSearchableIndices", err) } } @@ -181,7 +182,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Searchable", err) } s.Searchable = value case bool: @@ -195,7 +196,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -204,13 +205,13 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/fieldcollapse.go b/typedapi/types/fieldcollapse.go index 0a6fd1f52a..edfb5697fa 100644 --- a/typedapi/types/fieldcollapse.go +++ b/typedapi/types/fieldcollapse.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldCollapse type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/FieldCollapse.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/FieldCollapse.ts#L24-L38 type FieldCollapse struct { Collapse *FieldCollapse `json:"collapse,omitempty"` // Field The field to collapse the result set on @@ -59,12 +60,12 @@ func (s *FieldCollapse) UnmarshalJSON(data []byte) error { case "collapse": if err := dec.Decode(&s.Collapse); err != nil { - return err + return fmt.Errorf("%s | %w", "Collapse", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "inner_hits": @@ -73,13 +74,13 @@ func (s *FieldCollapse) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewInnerHits() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } s.InnerHits = append(s.InnerHits, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.InnerHits); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } } @@ -91,7 +92,7 @@ func (s *FieldCollapse) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxConcurrentGroupSearches", err) } s.MaxConcurrentGroupSearches = &value case float64: diff --git a/typedapi/types/fielddatafrequencyfilter.go b/typedapi/types/fielddatafrequencyfilter.go index 7632c2ba65..bf1205cc11 100644 --- a/typedapi/types/fielddatafrequencyfilter.go +++ b/typedapi/types/fielddatafrequencyfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FielddataFrequencyFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/FielddataFrequencyFilter.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/FielddataFrequencyFilter.ts#L22-L26 type FielddataFrequencyFilter struct { Max Float64 `json:"max"` Min Float64 `json:"min"` @@ -59,7 +60,7 @@ func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) s.Max = f @@ -75,7 +76,7 @@ func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) s.Min = f @@ -92,7 +93,7 @@ func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinSegmentSize", err) } s.MinSegmentSize = value case float64: diff --git a/typedapi/types/fielddatarecord.go b/typedapi/types/fielddatarecord.go index dcfea5ab2d..8e15703e58 100644 --- a/typedapi/types/fielddatarecord.go +++ b/typedapi/types/fielddatarecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FielddataRecord type. 
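Editor's note — as the FielddataFrequencyFilter hunk above shows, the generated decoders already tolerate numeric values that arrive as quoted JSON strings (the strconv.ParseFloat / strconv.Atoi branches); only the error prefix is new. A small illustration of that leniency, with invented values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Numbers may arrive either as JSON numbers or as quoted strings;
	// both forms end up in the same struct fields.
	payload := []byte(`{"max": "0.1", "min": 0.001, "min_segment_size": "500"}`)

	var f types.FielddataFrequencyFilter
	if err := json.Unmarshal(payload, &f); err != nil {
		// A malformed value would now read, e.g.,
		// `Max | strconv.ParseFloat: parsing "abc": invalid syntax`.
		panic(err)
	}
	fmt.Printf("max=%v min=%v min_segment_size=%d\n", f.Max, f.Min, f.MinSegmentSize)
}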
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/fielddata/types.ts#L20-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/fielddata/types.ts#L20-L48 type FielddataRecord struct { // Field field name Field *string `json:"field,omitempty"` @@ -64,7 +65,7 @@ func (s *FielddataRecord) UnmarshalJSON(data []byte) error { case "field", "f": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *FielddataRecord) UnmarshalJSON(data []byte) error { case "host", "h": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *FielddataRecord) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -100,7 +101,7 @@ func (s *FielddataRecord) UnmarshalJSON(data []byte) error { case "ip": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,7 +113,7 @@ func (s *FielddataRecord) UnmarshalJSON(data []byte) error { case "node", "n": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *FielddataRecord) UnmarshalJSON(data []byte) error { case "size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/fielddatastats.go b/typedapi/types/fielddatastats.go index 3351cbeac0..1586d0c3b8 100644 --- a/typedapi/types/fielddatastats.go +++ b/typedapi/types/fielddatastats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FielddataStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L111-L116 type FielddataStats struct { Evictions *int64 `json:"evictions,omitempty"` Fields map[string]FieldMemoryUsage `json:"fields,omitempty"` @@ -60,7 +61,7 @@ func (s *FielddataStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = &value case float64: @@ -73,12 +74,12 @@ func (s *FielddataStats) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]FieldMemoryUsage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "memory_size": if err := dec.Decode(&s.MemorySize); err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySize", err) } case "memory_size_in_bytes": @@ -88,7 +89,7 @@ func (s *FielddataStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySizeInBytes", err) } s.MemorySizeInBytes = value case float64: diff --git a/typedapi/types/fielddatemath.go b/typedapi/types/fielddatemath.go index 20e1ceb935..4a38558cf9 100644 --- a/typedapi/types/fielddatemath.go +++ b/typedapi/types/fielddatemath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // Float64 // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L296-L303 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L296-L303 type FieldDateMath interface{} diff --git a/typedapi/types/fieldlookup.go b/typedapi/types/fieldlookup.go index 08c4c085c0..35d72631e5 100644 --- a/typedapi/types/fieldlookup.go +++ b/typedapi/types/fieldlookup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FieldLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L409-L426 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L409-L426 type FieldLookup struct { // Id `id` of the document. 
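Editor's note — the FielddataStats hunk above also covers the two remaining shapes these decoders deal with: a map-valued field (`fields`, lazily allocated before decoding) and a union-style alias (`memory_size` is a ByteSize which, like the FieldDateMath and FieldValue aliases further down in this diff, appears to be an interface{} and so keeps whatever representation the server sent). A sketch with an invented response fragment:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{
		"evictions": 0,
		"memory_size": "1kb",
		"memory_size_in_bytes": 1024,
		"fields": {"title": {"memory_size_in_bytes": 512}}
	}`)

	var s types.FielddataStats
	if err := json.Unmarshal(payload, &s); err != nil {
		panic(err) // a malformed nested object would now read e.g. `Fields | ...`
	}

	fmt.Printf("memory_size=%v (%T)\n", s.MemorySize, s.MemorySize) // e.g. "1kb" kept as sent
	fmt.Println(s.Fields["title"].MemorySizeInBytes)                // 512
}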
Id string `json:"id"` @@ -58,22 +59,22 @@ func (s *FieldLookup) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } } diff --git a/typedapi/types/fieldmapping.go b/typedapi/types/fieldmapping.go index 00bb0f7fce..fcef5245cc 100644 --- a/typedapi/types/fieldmapping.go +++ b/typedapi/types/fieldmapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L24-L27 type FieldMapping struct { FullName string `json:"full_name"` Mapping map[string]Property `json:"mapping"` @@ -54,7 +55,7 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { case "full_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FullName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/fieldmemoryusage.go b/typedapi/types/fieldmemoryusage.go index cf859895a5..d482ba5180 100644 --- a/typedapi/types/fieldmemoryusage.go +++ b/typedapi/types/fieldmemoryusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldMemoryUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L118-L121 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L118-L121 type FieldMemoryUsage struct { MemorySize ByteSize `json:"memory_size,omitempty"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` @@ -53,7 +54,7 @@ func (s *FieldMemoryUsage) UnmarshalJSON(data []byte) error { case "memory_size": if err := dec.Decode(&s.MemorySize); err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySize", err) } case "memory_size_in_bytes": @@ -63,7 +64,7 @@ func (s *FieldMemoryUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySizeInBytes", err) } s.MemorySizeInBytes = value case float64: diff --git a/typedapi/types/fieldmetric.go b/typedapi/types/fieldmetric.go index c36be40411..098621f376 100644 --- a/typedapi/types/fieldmetric.go +++ b/typedapi/types/fieldmetric.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/metric" @@ -31,7 +32,7 @@ import ( // FieldMetric type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/_types/Metric.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/_types/Metric.ts#L30-L35 type FieldMetric struct { // Field The field to collect metrics for. This must be a numeric of some kind. Field string `json:"field"` @@ -57,12 +58,12 @@ func (s *FieldMetric) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } } diff --git a/typedapi/types/fieldnamesfield.go b/typedapi/types/fieldnamesfield.go index 190bb30016..4d579529dd 100644 --- a/typedapi/types/fieldnamesfield.go +++ b/typedapi/types/fieldnamesfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldNamesField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L42-L44 type FieldNamesField struct { Enabled bool `json:"enabled"` } @@ -57,7 +58,7 @@ func (s *FieldNamesField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/fieldrule.go b/typedapi/types/fieldrule.go index 24e89a8dad..c90b51319d 100644 --- a/typedapi/types/fieldrule.go +++ b/typedapi/types/fieldrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FieldRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleMappingRule.ts#L36-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleMappingRule.ts#L36-L44 type FieldRule struct { Dn []string `json:"dn,omitempty"` Groups []string `json:"groups,omitempty"` @@ -57,13 +58,13 @@ func (s *FieldRule) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Dn", err) } s.Dn = append(s.Dn, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Dn); err != nil { - return err + return fmt.Errorf("%s | %w", "Dn", err) } } @@ -73,13 +74,13 @@ func (s *FieldRule) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } s.Groups = append(s.Groups, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } } @@ -89,13 +90,13 @@ func (s *FieldRule) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } s.Username = append(s.Username, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/fields.go b/typedapi/types/fields.go index 863e68687f..cfa5d863cf 100644 --- a/typedapi/types/fields.go +++ b/typedapi/types/fields.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Fields type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L140-L140 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L140-L140 type Fields []string diff --git a/typedapi/types/fieldsecurity.go b/typedapi/types/fieldsecurity.go index dfdcbef4fd..4563e9480a 100644 --- a/typedapi/types/fieldsecurity.go +++ b/typedapi/types/fieldsecurity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FieldSecurity type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/FieldSecurity.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/FieldSecurity.ts#L22-L25 type FieldSecurity struct { Except []string `json:"except,omitempty"` Grant []string `json:"grant,omitempty"` @@ -56,13 +57,13 @@ func (s *FieldSecurity) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Except", err) } s.Except = append(s.Except, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Except); err != nil { - return err + return fmt.Errorf("%s | %w", "Except", err) } } @@ -72,13 +73,13 @@ func (s *FieldSecurity) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Grant", err) } s.Grant = append(s.Grant, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Grant); err != nil { - return err + return fmt.Errorf("%s | %w", "Grant", err) } } diff --git a/typedapi/types/fieldsizeusage.go b/typedapi/types/fieldsizeusage.go index 01f90b1a13..fa5daf49ef 100644 --- a/typedapi/types/fieldsizeusage.go +++ b/typedapi/types/fieldsizeusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldSizeUsage type. 
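Editor's note — the FieldRule and FieldSecurity hunks above both use the same single-value-or-array shim (the `bytes.HasPrefix(rawMsg, []byte("["))` check), so a bare string and an array decode identically; again only the error prefix changes. A short, self-contained illustration:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both payloads end up as a []string in the Grant field.
	for _, payload := range []string{
		`{"grant": "category.*"}`,
		`{"grant": ["category.*", "price"], "except": ["price"]}`,
	} {
		var fs types.FieldSecurity
		if err := json.Unmarshal([]byte(payload), &fs); err != nil {
			panic(err) // would now read e.g. `Grant | ...`
		}
		fmt.Printf("grant=%v except=%v\n", fs.Grant, fs.Except)
	}
}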
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L92-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L92-L95 type FieldSizeUsage struct { Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` @@ -53,7 +54,7 @@ func (s *FieldSizeUsage) UnmarshalJSON(data []byte) error { case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "size_in_bytes": @@ -63,7 +64,7 @@ func (s *FieldSizeUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: diff --git a/typedapi/types/fieldsort.go b/typedapi/types/fieldsort.go index fc9bcd8788..c63bb4cbcf 100644 --- a/typedapi/types/fieldsort.go +++ b/typedapi/types/fieldsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -35,7 +36,7 @@ import ( // FieldSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L44-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L44-L53 type FieldSort struct { Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` @@ -69,7 +70,7 @@ func (s *FieldSort) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,32 +81,32 @@ func (s *FieldSort) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "nested": if err := dec.Decode(&s.Nested); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested", err) } case "numeric_type": if err := dec.Decode(&s.NumericType); err != nil { - return err + return fmt.Errorf("%s | %w", "NumericType", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "unmapped_type": if err := dec.Decode(&s.UnmappedType); err != nil { - return err + return fmt.Errorf("%s | %w", "UnmappedType", err) } } diff --git a/typedapi/types/fieldstat.go b/typedapi/types/fieldstat.go index 0ce56060a3..3cb756a7a4 100644 --- a/typedapi/types/fieldstat.go +++ b/typedapi/types/fieldstat.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldStat type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/find_structure/types.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/find_structure/types.ts#L23-L33 type FieldStat struct { Cardinality int `json:"cardinality"` Count int `json:"count"` @@ -66,7 +67,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Cardinality", err) } s.Cardinality = value case float64: @@ -82,7 +83,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -93,7 +94,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "earliest": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Earliest", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "latest": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Latest", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -122,7 +123,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxValue", err) } s.MaxValue = &value case float64: @@ -138,7 +139,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MeanValue", err) } s.MeanValue = &value case float64: @@ -154,7 +155,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MedianValue", err) } s.MedianValue = &value case float64: @@ -170,7 +171,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinValue", err) } s.MinValue = &value case float64: @@ -180,7 +181,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "top_hits": if err := dec.Decode(&s.TopHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TopHits", err) } } diff --git a/typedapi/types/fieldstatistics.go b/typedapi/types/fieldstatistics.go index af86cb8fce..9c8d7a5968 100644 --- a/typedapi/types/fieldstatistics.go +++ b/typedapi/types/fieldstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/types.ts#L28-L32 type FieldStatistics struct { DocCount int `json:"doc_count"` SumDocFreq int64 `json:"sum_doc_freq"` @@ -60,7 +61,7 @@ func (s *FieldStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -75,7 +76,7 @@ func (s *FieldStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumDocFreq", err) } s.SumDocFreq = value case float64: @@ -90,7 +91,7 @@ func (s *FieldStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumTtf", err) } s.SumTtf = value case float64: diff --git a/typedapi/types/fieldsuggester.go b/typedapi/types/fieldsuggester.go index 6c62c8852e..c5d47ace58 100644 --- a/typedapi/types/fieldsuggester.go +++ b/typedapi/types/fieldsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L106-L139 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L106-L139 type FieldSuggester struct { // Completion Provides auto-complete/search-as-you-type functionality. 
Completion *CompletionSuggester `json:"completion,omitempty"` @@ -65,18 +66,18 @@ func (s *FieldSuggester) UnmarshalJSON(data []byte) error { case "completion": if err := dec.Decode(&s.Completion); err != nil { - return err + return fmt.Errorf("%s | %w", "Completion", err) } case "phrase": if err := dec.Decode(&s.Phrase); err != nil { - return err + return fmt.Errorf("%s | %w", "Phrase", err) } case "prefix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Prefix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *FieldSuggester) UnmarshalJSON(data []byte) error { case "regex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Regex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,13 +100,13 @@ func (s *FieldSuggester) UnmarshalJSON(data []byte) error { case "term": if err := dec.Decode(&s.Term); err != nil { - return err + return fmt.Errorf("%s | %w", "Term", err) } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/fieldsummary.go b/typedapi/types/fieldsummary.go index 106105c498..0d140fac3a 100644 --- a/typedapi/types/fieldsummary.go +++ b/typedapi/types/fieldsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // FieldSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L54-L63 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L54-L63 type FieldSummary struct { Any uint `json:"any"` DocValues uint `json:"doc_values"` diff --git a/typedapi/types/fieldsusagebody.go b/typedapi/types/fieldsusagebody.go index f77bcc9c96..0610e6f9bd 100644 --- a/typedapi/types/fieldsusagebody.go +++ b/typedapi/types/fieldsusagebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -27,7 +27,7 @@ import ( // FieldsUsageBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L32-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L32-L36 type FieldsUsageBody struct { FieldsUsageBody map[string]UsageStatsIndex `json:"-"` Shards_ ShardStatistics `json:"_shards"` diff --git a/typedapi/types/fieldtypes.go b/typedapi/types/fieldtypes.go index f115324a65..f3402756ef 100644 --- a/typedapi/types/fieldtypes.go +++ b/typedapi/types/fieldtypes.go @@ -16,7 +16,7 @@ // under the License. 
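Editor's note — the FieldSuggester hunk above is a container type: at most one of the suggester variants (completion, phrase, term) is expected alongside the plain string fields (prefix, regex, text) that are unquoted by hand. Purely as an illustration of how the decoder fills such a container; the sample body is borrowed from the standard term-suggester docs example and is not part of this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"text": "tring out Elasticsearch", "term": {"field": "message"}}`)

	var fs types.FieldSuggester
	if err := json.Unmarshal(payload, &fs); err != nil {
		panic(err) // a bad "term" body would now be reported as `Term | ...`
	}

	fmt.Println(*fs.Text)       // tring out Elasticsearch
	fmt.Println(fs.Term != nil) // true
	fmt.Println(fs.Completion)  // <nil> -- the other variants stay unset
}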
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L136-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L136-L167 type FieldTypes struct { // Count The number of occurrences of the field type in selected nodes. Count int `json:"count"` @@ -74,7 +75,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -90,7 +91,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCount", err) } s.IndexCount = value case float64: @@ -105,7 +106,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedVectorCount", err) } s.IndexedVectorCount = &value case float64: @@ -120,7 +121,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedVectorDimMax", err) } s.IndexedVectorDimMax = &value case float64: @@ -135,7 +136,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedVectorDimMin", err) } s.IndexedVectorDimMin = &value case float64: @@ -145,7 +146,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "script_count": @@ -156,7 +157,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptCount", err) } s.ScriptCount = &value case float64: diff --git a/typedapi/types/fieldtypesmappings.go b/typedapi/types/fieldtypesmappings.go index 1d85aca30c..512620caef 100644 --- a/typedapi/types/fieldtypesmappings.go +++ b/typedapi/types/fieldtypesmappings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FieldTypesMappings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L109-L134 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L109-L134 type FieldTypesMappings struct { // FieldTypes Contains statistics about field data types used in selected nodes. FieldTypes []FieldTypes `json:"field_types"` @@ -64,12 +65,12 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case "field_types": if err := dec.Decode(&s.FieldTypes); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldTypes", err) } case "runtime_field_types": if err := dec.Decode(&s.RuntimeFieldTypes); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeFieldTypes", err) } case "total_deduplicated_field_count": @@ -80,7 +81,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDeduplicatedFieldCount", err) } s.TotalDeduplicatedFieldCount = &value case float64: @@ -90,7 +91,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case "total_deduplicated_mapping_size": if err := dec.Decode(&s.TotalDeduplicatedMappingSize); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDeduplicatedMappingSize", err) } case "total_deduplicated_mapping_size_in_bytes": @@ -100,7 +101,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDeduplicatedMappingSizeInBytes", err) } s.TotalDeduplicatedMappingSizeInBytes = &value case float64: @@ -116,7 +117,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalFieldCount", err) } s.TotalFieldCount = &value case float64: diff --git a/typedapi/types/fieldvalue.go b/typedapi/types/fieldvalue.go index 7f53965d34..ce77fc4705 100644 --- a/typedapi/types/fieldvalue.go +++ b/typedapi/types/fieldvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,5 +29,5 @@ package types // nil // json.RawMessage // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L25-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L25-L37 type FieldValue interface{} diff --git a/typedapi/types/fieldvaluefactorscorefunction.go b/typedapi/types/fieldvaluefactorscorefunction.go index 71ce7a88d7..67338f2c01 100644 --- a/typedapi/types/fieldvaluefactorscorefunction.go +++ b/typedapi/types/fieldvaluefactorscorefunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // FieldValueFactorScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L132-L151 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L132-L151 type FieldValueFactorScoreFunction struct { // Factor Optional factor to multiply the field value with. Factor *Float64 `json:"factor,omitempty"` @@ -68,7 +69,7 @@ func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Factor", err) } f := Float64(value) s.Factor = &f @@ -79,7 +80,7 @@ func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": @@ -89,7 +90,7 @@ func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } f := Float64(value) s.Missing = &f @@ -100,7 +101,7 @@ func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { case "modifier": if err := dec.Decode(&s.Modifier); err != nil { - return err + return fmt.Errorf("%s | %w", "Modifier", err) } } diff --git a/typedapi/types/filecountsnapshotstats.go b/typedapi/types/filecountsnapshotstats.go index 17712f3cef..752583de75 100644 --- a/typedapi/types/filecountsnapshotstats.go +++ b/typedapi/types/filecountsnapshotstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FileCountSnapshotStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/FileCountSnapshotStats.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/FileCountSnapshotStats.ts#L22-L25 type FileCountSnapshotStats struct { FileCount int `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` @@ -59,7 +60,7 @@ func (s *FileCountSnapshotStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FileCount", err) } s.FileCount = value case float64: @@ -74,7 +75,7 @@ func (s *FileCountSnapshotStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: diff --git a/typedapi/types/filedetails.go b/typedapi/types/filedetails.go index 2d5e4b928b..54ec2bc00f 100644 --- a/typedapi/types/filedetails.go +++ b/typedapi/types/filedetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FileDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L50-L54 type FileDetails struct { Length int64 `json:"length"` Name string `json:"name"` @@ -59,7 +60,7 @@ func (s *FileDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = value case float64: @@ -70,7 +71,7 @@ func (s *FileDetails) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *FileDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Recovered", err) } s.Recovered = value case float64: diff --git a/typedapi/types/filesystem.go b/typedapi/types/filesystem.go index 01f2b568ae..983ff121b3 100644 --- a/typedapi/types/filesystem.go +++ b/typedapi/types/filesystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FileSystem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L698-L716 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L698-L716 type FileSystem struct { // Data List of all file stores. Data []DataPathStats `json:"data,omitempty"` @@ -60,12 +61,12 @@ func (s *FileSystem) UnmarshalJSON(data []byte) error { case "data": if err := dec.Decode(&s.Data); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } case "io_stats": if err := dec.Decode(&s.IoStats); err != nil { - return err + return fmt.Errorf("%s | %w", "IoStats", err) } case "timestamp": @@ -75,7 +76,7 @@ func (s *FileSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = &value case float64: @@ -85,7 +86,7 @@ func (s *FileSystem) UnmarshalJSON(data []byte) error { case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } } diff --git a/typedapi/types/filesystemtotal.go b/typedapi/types/filesystemtotal.go index d0409a1aff..e915b9aa26 100644 --- a/typedapi/types/filesystemtotal.go +++ b/typedapi/types/filesystemtotal.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FileSystemTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L757-L786 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L757-L786 type FileSystemTotal struct { // Available Total disk space available to this Java virtual machine on all file stores. 
// Depending on OS or process level restrictions, this might appear less than @@ -73,7 +74,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { case "available": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,7 +90,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvailableInBytes", err) } s.AvailableInBytes = &value case float64: @@ -100,7 +101,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { case "free": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Free", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,7 +117,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeInBytes", err) } s.FreeInBytes = &value case float64: @@ -127,7 +128,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { case "total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -143,7 +144,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = &value case float64: diff --git a/typedapi/types/fillmaskinferenceoptions.go b/typedapi/types/fillmaskinferenceoptions.go index 4081d34dd2..70fc2ac2c5 100644 --- a/typedapi/types/fillmaskinferenceoptions.go +++ b/typedapi/types/fillmaskinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FillMaskInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L266-L280 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L266-L280 type FillMaskInferenceOptions struct { // MaskToken The string/token which will be removed from incoming documents and replaced // with the inference prediction(s). 
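Editor's note — because the generated decoders delegate to one another through encoding/json, the new prefixes compose into a rough field path. For the FileSystem / FileSystemTotal pair above, a malformed nested value surfaces roughly as shown below (payload invented, message approximate):

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"timestamp": 1700000000000, "total": {"free_in_bytes": "not-a-number"}}`)

	var fs types.FileSystem
	err := json.Unmarshal(payload, &fs)

	// Roughly: Total | FreeInBytes | strconv.ParseInt: parsing "not-a-number": invalid syntax
	fmt.Println(err)

	// The chain is still unwrappable down to the strconv failure.
	fmt.Println(errors.Is(err, strconv.ErrSyntax)) // true
}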
@@ -69,7 +70,7 @@ func (s *FillMaskInferenceOptions) UnmarshalJSON(data []byte) error { case "mask_token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaskToken", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *FillMaskInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -97,7 +98,7 @@ func (s *FillMaskInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -108,7 +109,7 @@ func (s *FillMaskInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/fillmaskinferenceupdateoptions.go b/typedapi/types/fillmaskinferenceupdateoptions.go index b7a1d530be..73e6a4cd91 100644 --- a/typedapi/types/fillmaskinferenceupdateoptions.go +++ b/typedapi/types/fillmaskinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FillMaskInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L411-L418 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L411-L418 type FillMaskInferenceUpdateOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -64,7 +65,7 @@ func (s *FillMaskInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -75,7 +76,7 @@ func (s *FillMaskInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *FillMaskInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/filteraggregate.go b/typedapi/types/filteraggregate.go index 35ceb734bc..19a2eb479c 100644 --- a/typedapi/types/filteraggregate.go +++ b/typedapi/types/filteraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // FilterAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L495-L496 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L495-L496 type FilterAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := 
NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != 
nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/filterref.go b/typedapi/types/filterref.go index 3bb0150d50..53c7d4c71c 100644 --- a/typedapi/types/filterref.go +++ b/typedapi/types/filterref.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/filtertype" @@ -31,7 +32,7 @@ import ( // FilterRef type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Filter.ts#L31-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Filter.ts#L31-L41 type FilterRef struct { // FilterId The identifier for the filter. FilterId string `json:"filter_id"` @@ -57,12 +58,12 @@ func (s *FilterRef) UnmarshalJSON(data []byte) error { case "filter_id": if err := dec.Decode(&s.FilterId); err != nil { - return err + return fmt.Errorf("%s | %w", "FilterId", err) } case "filter_type": if err := dec.Decode(&s.FilterType); err != nil { - return err + return fmt.Errorf("%s | %w", "FilterType", err) } } diff --git a/typedapi/types/filtersaggregate.go b/typedapi/types/filtersaggregate.go index b1bb0a1e65..9bfc492a53 100644 --- a/typedapi/types/filtersaggregate.go +++ b/typedapi/types/filtersaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FiltersAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L568-L569 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L568-L569 type FiltersAggregate struct { Buckets BucketsFiltersBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *FiltersAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]FiltersBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []FiltersBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/filtersaggregation.go b/typedapi/types/filtersaggregation.go index 4e35549d53..88902da066 100644 --- a/typedapi/types/filtersaggregation.go +++ b/typedapi/types/filtersaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FiltersAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L358-L378 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L358-L378 type FiltersAggregation struct { // Filters Collection of queries from which to build buckets. 
Filters BucketsQuery `json:"filters,omitempty"` @@ -71,13 +72,13 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]Query, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filters", err) } s.Filters = o case '[': o := []Query{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filters", err) } s.Filters = o } @@ -89,7 +90,7 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -98,13 +99,13 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,7 +121,7 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OtherBucket", err) } s.OtherBucket = &value case bool: @@ -130,7 +131,7 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { case "other_bucket_key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "OtherBucketKey", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/filtersbucket.go b/typedapi/types/filtersbucket.go index 1e028a8781..ff8f8186fe 100644 --- a/typedapi/types/filtersbucket.go +++ b/typedapi/types/filtersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // FiltersBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L571-L571 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L571-L571 type FiltersBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -60,7 +60,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -82,490 +82,490 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) 
} s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": 
o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -575,7 +575,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/fingerprintanalyzer.go b/typedapi/types/fingerprintanalyzer.go index 738263c90b..00860a8c67 100644 --- 
a/typedapi/types/fingerprintanalyzer.go +++ b/typedapi/types/fingerprintanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FingerprintAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L37-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L37-L45 type FingerprintAnalyzer struct { MaxOutputSize int `json:"max_output_size"` PreserveOriginal bool `json:"preserve_original"` @@ -64,7 +65,7 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutputSize", err) } s.MaxOutputSize = value case float64: @@ -79,7 +80,7 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } s.PreserveOriginal = value case bool: @@ -89,7 +90,7 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,20 +105,20 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "stopwords_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StopwordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,12 +129,12 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/fingerprinttokenfilter.go b/typedapi/types/fingerprinttokenfilter.go index 58e8221478..fc5b9ede2b 100644 --- a/typedapi/types/fingerprinttokenfilter.go +++ b/typedapi/types/fingerprinttokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FingerprintTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L194-L198 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L194-L198 type FingerprintTokenFilter struct { MaxOutputSize *int `json:"max_output_size,omitempty"` Separator *string `json:"separator,omitempty"` @@ -61,7 +62,7 @@ func (s *FingerprintTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutputSize", err) } s.MaxOutputSize = &value case float64: @@ -72,7 +73,7 @@ func (s *FingerprintTokenFilter) UnmarshalJSON(data []byte) error { case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,12 +84,12 @@ func (s *FingerprintTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/flattened.go b/typedapi/types/flattened.go index 1cb05d18e7..666ff0a652 100644 --- a/typedapi/types/flattened.go +++ b/typedapi/types/flattened.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Flattened type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L356-L358 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L356-L358 type Flattened struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *Flattened) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -73,7 +74,7 @@ func (s *Flattened) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -88,7 +89,7 @@ func (s *Flattened) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FieldCount", err) } s.FieldCount = value case float64: diff --git a/typedapi/types/flattenedproperty.go b/typedapi/types/flattenedproperty.go index 120cabfb43..97aaa259eb 100644 --- a/typedapi/types/flattenedproperty.go +++ b/typedapi/types/flattenedproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // FlattenedProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/complex.ts#L26-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/complex.ts#L26-L37 type FlattenedProperty struct { Boost *Float64 `json:"boost,omitempty"` DepthLimit *int `json:"depth_limit,omitempty"` @@ -75,7 +76,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -92,7 +93,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DepthLimit", err) } s.DepthLimit = &value case float64: @@ -107,7 +108,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -116,7 +117,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "eager_global_ordinals": @@ -126,7 +127,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EagerGlobalOrdinals", err) } s.EagerGlobalOrdinals = &value case bool: @@ -448,7 +449,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -463,7 +464,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -472,7 +473,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case "index_options": if err := dec.Decode(&s.IndexOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexOptions", err) } case "meta": @@ -480,13 +481,13 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -805,7 +806,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -821,7 +822,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitQueriesOnWhitespace", err) } s.SplitQueriesOnWhitespace = &value case bool: @@ -830,7 +831,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/floatnumberproperty.go b/typedapi/types/floatnumberproperty.go index ed5674a097..17140cf9c6 100644 --- a/typedapi/types/floatnumberproperty.go +++ b/typedapi/types/floatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // FloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L134-L137 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L134-L137 type FloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - 
return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,7 +499,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -508,7 +509,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } f := float32(value) s.NullValue = &f @@ -519,7 +520,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -832,7 +833,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -841,7 +842,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -850,7 +851,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -858,7 +859,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -868,7 +869,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -884,7 +885,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -898,7 +899,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -907,12 +908,12 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/floatrangeproperty.go b/typedapi/types/floatrangeproperty.go index 4a8dba206c..20f2bc41cc 100644 --- a/typedapi/types/floatrangeproperty.go +++ b/typedapi/types/floatrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // FloatRangeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/range.ts#L38-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/range.ts#L38-L40 type FloatRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -72,7 +73,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -101,13 +102,13 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -118,7 +119,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -127,7 +128,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -445,7 +446,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -460,7 +461,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -472,7 +473,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -785,7 +786,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -801,7 +802,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -810,7 +811,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case "type": if err := 
dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/flushstats.go b/typedapi/types/flushstats.go index 98cd8c6ce0..e0600f48bd 100644 --- a/typedapi/types/flushstats.go +++ b/typedapi/types/flushstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FlushStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L123-L128 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L123-L128 type FlushStats struct { Periodic int64 `json:"periodic"` Total int64 `json:"total"` @@ -60,7 +61,7 @@ func (s *FlushStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Periodic", err) } s.Periodic = value case float64: @@ -75,7 +76,7 @@ func (s *FlushStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -85,12 +86,12 @@ func (s *FlushStats) UnmarshalJSON(data []byte) error { case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/followerindex.go b/typedapi/types/followerindex.go index 501a749e3d..5e684c96dd 100644 --- a/typedapi/types/followerindex.go +++ b/typedapi/types/followerindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/followerindexstatus" @@ -31,7 +32,7 @@ import ( // FollowerIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow_info/types.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow_info/types.ts#L22-L28 type FollowerIndex struct { FollowerIndex string `json:"follower_index"` LeaderIndex string `json:"leader_index"` @@ -57,27 +58,27 @@ func (s *FollowerIndex) UnmarshalJSON(data []byte) error { case "follower_index": if err := dec.Decode(&s.FollowerIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "FollowerIndex", err) } case "leader_index": if err := dec.Decode(&s.LeaderIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "LeaderIndex", err) } case "parameters": if err := dec.Decode(&s.Parameters); err != nil { - return err + return fmt.Errorf("%s | %w", "Parameters", err) } case "remote_cluster": if err := dec.Decode(&s.RemoteCluster); err != nil { - return err + return fmt.Errorf("%s | %w", "RemoteCluster", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } } diff --git a/typedapi/types/followerindexparameters.go b/typedapi/types/followerindexparameters.go index db794b3fca..581bd38cbf 100644 --- a/typedapi/types/followerindexparameters.go +++ b/typedapi/types/followerindexparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FollowerIndexParameters type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/follow_info/types.ts#L38-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/follow_info/types.ts#L38-L49 type FollowerIndexParameters struct { MaxOutstandingReadRequests int `json:"max_outstanding_read_requests"` MaxOutstandingWriteRequests int `json:"max_outstanding_write_requests"` @@ -67,7 +68,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingReadRequests", err) } s.MaxOutstandingReadRequests = value case float64: @@ -83,7 +84,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOutstandingWriteRequests", err) } s.MaxOutstandingWriteRequests = value case float64: @@ -99,7 +100,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestOperationCount", err) } s.MaxReadRequestOperationCount = value case float64: @@ -110,7 +111,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_read_request_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxReadRequestSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,7 +122,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_retry_delay": if err := dec.Decode(&s.MaxRetryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRetryDelay", err) } case "max_write_buffer_count": @@ -132,7 +133,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferCount", err) } s.MaxWriteBufferCount = value case float64: @@ -143,7 +144,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_write_buffer_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteBufferSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -160,7 +161,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestOperationCount", err) } s.MaxWriteRequestOperationCount = value case float64: @@ -171,7 +172,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_write_request_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWriteRequestSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -182,7 +183,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "read_poll_timeout": if err := dec.Decode(&s.ReadPollTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadPollTimeout", err) } } diff --git a/typedapi/types/followindexstats.go b/typedapi/types/followindexstats.go index af2059e8d1..30a139a2d6 100644 --- a/typedapi/types/followindexstats.go +++ b/typedapi/types/followindexstats.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FollowIndexStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/_types/FollowIndexStats.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/_types/FollowIndexStats.ts#L30-L33 type FollowIndexStats struct { Index string `json:"index"` Shards []CcrShardStats `json:"shards"` @@ -52,12 +53,12 @@ func (s *FollowIndexStats) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "shards": if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } } diff --git a/typedapi/types/followstats.go b/typedapi/types/followstats.go index fa0a3ca255..f3abb0f5bb 100644 --- a/typedapi/types/followstats.go +++ b/typedapi/types/followstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // FollowStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/stats/types.ts.ts#L41-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/stats/types.ts.ts#L41-L43 type FollowStats struct { Indices []FollowIndexStats `json:"indices"` } diff --git a/typedapi/types/forcemergeconfiguration.go b/typedapi/types/forcemergeconfiguration.go index b637e4e9cc..df14ef690c 100644 --- a/typedapi/types/forcemergeconfiguration.go +++ b/typedapi/types/forcemergeconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ForceMergeConfiguration type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/_types/Phase.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/_types/Phase.ts#L56-L58 type ForceMergeConfiguration struct { MaxNumSegments int `json:"max_num_segments"` } @@ -58,7 +59,7 @@ func (s *ForceMergeConfiguration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNumSegments", err) } s.MaxNumSegments = value case float64: diff --git a/typedapi/types/forcemergeresponsebody.go b/typedapi/types/forcemergeresponsebody.go index 03fc201096..c141571f8c 100644 --- a/typedapi/types/forcemergeresponsebody.go +++ b/typedapi/types/forcemergeresponsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ForceMergeResponseBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/forcemerge/_types/response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/forcemerge/_types/response.ts#L22-L28 type ForceMergeResponseBody struct { Shards_ ShardStatistics `json:"_shards"` // Task task contains a task id returned when wait_for_completion=false, @@ -55,13 +56,13 @@ func (s *ForceMergeResponseBody) UnmarshalJSON(data []byte) error { case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "task": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Task", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/foreachprocessor.go b/typedapi/types/foreachprocessor.go index ba3b42f930..6949848afa 100644 --- a/typedapi/types/foreachprocessor.go +++ b/typedapi/types/foreachprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ForeachProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L656-L670 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L656-L670 type ForeachProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
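
The hunks above replace bare "return err" with fmt.Errorf("%s | %w", "<FieldName>", err), so a decode failure now names the Go struct field that rejected the input while keeping the original error in the chain. A minimal sketch of how that surfaces to a caller, assuming the generated types are imported as github.com/elastic/go-elasticsearch/v8/typedapi/types (the import path is not part of this diff):

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path for the generated types
)

func main() {
	var cfg types.ForceMergeConfiguration
	// A non-numeric string drives the strconv.Atoi branch of the generated decoder into an error.
	err := json.Unmarshal([]byte(`{"max_num_segments":"not-a-number"}`), &cfg)
	if err != nil {
		fmt.Println(err) // e.g. MaxNumSegments | strconv.Atoi: parsing "not-a-number": invalid syntax

		// Because the wrap uses %w, the underlying strconv failure is still reachable.
		var numErr *strconv.NumError
		if errors.As(err, &numErr) {
			fmt.Println("offending value:", numErr.Num)
		}
	}
}

Before this change the same failure surfaced as the bare strconv error, with no hint of which field was being decoded.
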
@@ -71,7 +72,7 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,13 +83,13 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -118,7 +119,7 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -127,18 +128,18 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "processor": if err := dec.Decode(&s.Processor); err != nil { - return err + return fmt.Errorf("%s | %w", "Processor", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/formattablemetricaggregation.go b/typedapi/types/formattablemetricaggregation.go index 804d363614..5f1603d2a6 100644 --- a/typedapi/types/formattablemetricaggregation.go +++ b/typedapi/types/formattablemetricaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FormattableMetricAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L51-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L51-L53 type FormattableMetricAggregation struct { // Field The field on which to run the aggregation. 
Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *FormattableMetricAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *FormattableMetricAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *FormattableMetricAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *FormattableMetricAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *FormattableMetricAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/foundstatus.go b/typedapi/types/foundstatus.go index be56d09135..026369a882 100644 --- a/typedapi/types/foundstatus.go +++ b/typedapi/types/foundstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FoundStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/delete_privileges/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/delete_privileges/types.ts#L20-L22 type FoundStatus struct { Found bool `json:"found"` } @@ -57,7 +58,7 @@ func (s *FoundStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Found", err) } s.Found = value case bool: diff --git a/typedapi/types/frequencyencodingpreprocessor.go b/typedapi/types/frequencyencodingpreprocessor.go index 5f080edc1e..1d4f4076de 100644 --- a/typedapi/types/frequencyencodingpreprocessor.go +++ b/typedapi/types/frequencyencodingpreprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FrequencyEncodingPreprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L38-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L38-L42 type FrequencyEncodingPreprocessor struct { FeatureName string `json:"feature_name"` Field string `json:"field"` @@ -55,7 +56,7 @@ func (s *FrequencyEncodingPreprocessor) UnmarshalJSON(data []byte) error { case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *FrequencyEncodingPreprocessor) UnmarshalJSON(data []byte) error { case "field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *FrequencyEncodingPreprocessor) UnmarshalJSON(data []byte) error { s.FrequencyMap = make(map[string]Float64, 0) } if err := dec.Decode(&s.FrequencyMap); err != nil { - return err + return fmt.Errorf("%s | %w", "FrequencyMap", err) } } diff --git a/typedapi/types/frequentitemsetsaggregate.go b/typedapi/types/frequentitemsetsaggregate.go index dbff475f21..7cf3fe3deb 100644 --- a/typedapi/types/frequentitemsetsaggregate.go +++ b/typedapi/types/frequentitemsetsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FrequentItemSetsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L639-L640 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L639-L640 type FrequentItemSetsAggregate struct { Buckets BucketsFrequentItemSetsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *FrequentItemSetsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]FrequentItemSetsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []FrequentItemSetsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/frequentitemsetsaggregation.go b/typedapi/types/frequentitemsetsaggregation.go index 054dac416b..8958e4fce2 100644 --- a/typedapi/types/frequentitemsetsaggregation.go +++ b/typedapi/types/frequentitemsetsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FrequentItemSetsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1159-L1183 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1159-L1183 type FrequentItemSetsAggregation struct { // Fields Fields to analyze. 
Fields []FrequentItemSetsField `json:"fields"` @@ -61,12 +62,12 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "minimum_set_size": @@ -77,7 +78,7 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumSetSize", err) } s.MinimumSetSize = &value case float64: @@ -92,7 +93,7 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumSupport", err) } f := Float64(value) s.MinimumSupport = &f @@ -109,7 +110,7 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/frequentitemsetsbucket.go b/typedapi/types/frequentitemsetsbucket.go index e353622742..af28172d82 100644 --- a/typedapi/types/frequentitemsetsbucket.go +++ b/typedapi/types/frequentitemsetsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // FrequentItemSetsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L642-L645 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L642-L645 type FrequentItemSetsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -62,7 +62,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -75,7 +75,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { s.Key = make(map[string][]string, 0) } if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } case "support": @@ -85,7 +85,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Support", err) } f := Float64(value) s.Support = f @@ -108,490 +108,490 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if 
err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -601,7 +601,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/frequentitemsetsfield.go b/typedapi/types/frequentitemsetsfield.go index f1eaff0f62..ef66902efb 100644 --- a/typedapi/types/frequentitemsetsfield.go +++ b/typedapi/types/frequentitemsetsfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // FrequentItemSetsField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1145-L1157 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1145-L1157 type FrequentItemSetsField struct { // Exclude Values to exclude. // Can be regular expression strings or arrays of strings of exact terms. @@ -61,24 +62,24 @@ func (s *FrequentItemSetsField) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } s.Exclude = append(s.Exclude, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } } diff --git a/typedapi/types/frozenindices.go b/typedapi/types/frozenindices.go index ca802db788..b2af7c8680 100644 --- a/typedapi/types/frozenindices.go +++ b/typedapi/types/frozenindices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FrozenIndices type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L360-L362 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L360-L362 type FrozenIndices struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *FrozenIndices) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -73,7 +74,7 @@ func (s *FrozenIndices) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -87,7 +88,7 @@ func (s *FrozenIndices) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesCount", err) } s.IndicesCount = value case float64: diff --git a/typedapi/types/functionscore.go b/typedapi/types/functionscore.go index f48761ccd6..98e77397ee 100644 --- a/typedapi/types/functionscore.go +++ b/typedapi/types/functionscore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FunctionScore type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L201-L241 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L201-L241 type FunctionScore struct { // Exp Function that scores a document with a exponential decay, depending on the // distance of a numeric field value of the document from an origin. 
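
As the FrozenIndices hunks above show, the generated decoders switch on the dynamic type of each value, so boolean and integer fields are accepted either as native JSON values or as quoted strings; only a string that fails strconv parsing triggers the newly wrapped error. A small sketch under the same assumed typedapi/types import path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path for the generated types
)

func main() {
	// Mixed representations: a quoted bool, a native bool, and a quoted integer.
	doc := `{"available":"true","enabled":false,"indices_count":"3"}`

	var fi types.FrozenIndices
	if err := json.Unmarshal([]byte(doc), &fi); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(fi.Available, fi.Enabled, fi.IndicesCount) // true false 3
}
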
@@ -75,37 +76,37 @@ func (s *FunctionScore) UnmarshalJSON(data []byte) error { case "exp": if err := dec.Decode(&s.Exp); err != nil { - return err + return fmt.Errorf("%s | %w", "Exp", err) } case "field_value_factor": if err := dec.Decode(&s.FieldValueFactor); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldValueFactor", err) } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "gauss": if err := dec.Decode(&s.Gauss); err != nil { - return err + return fmt.Errorf("%s | %w", "Gauss", err) } case "linear": if err := dec.Decode(&s.Linear); err != nil { - return err + return fmt.Errorf("%s | %w", "Linear", err) } case "random_score": if err := dec.Decode(&s.RandomScore); err != nil { - return err + return fmt.Errorf("%s | %w", "RandomScore", err) } case "script_score": if err := dec.Decode(&s.ScriptScore); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptScore", err) } case "weight": @@ -115,7 +116,7 @@ func (s *FunctionScore) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Weight", err) } f := Float64(value) s.Weight = &f diff --git a/typedapi/types/functionscorequery.go b/typedapi/types/functionscorequery.go index eab26dd4bd..c04ae76316 100644 --- a/typedapi/types/functionscorequery.go +++ b/typedapi/types/functionscorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // FunctionScoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L92-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L92-L118 type FunctionScoreQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
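
Because every generated decoder applies the same wrapping, failures in nested values compose into a readable path: the "functions" case in the FunctionScoreQuery hunk below re-wraps whatever the FunctionScore decoder above returns. A sketch, again assuming the typedapi/types import path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path for the generated types
)

func main() {
	// An invalid weight inside a nested function clause.
	doc := `{"functions":[{"weight":"fast"}]}`

	var q types.FunctionScoreQuery
	err := json.Unmarshal([]byte(doc), &q)
	fmt.Println(err)
	// Prints something like:
	// Functions | Weight | strconv.ParseFloat: parsing "fast": invalid syntax
}
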
@@ -79,7 +80,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -90,12 +91,12 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { case "boost_mode": if err := dec.Decode(&s.BoostMode); err != nil { - return err + return fmt.Errorf("%s | %w", "BoostMode", err) } case "functions": if err := dec.Decode(&s.Functions); err != nil { - return err + return fmt.Errorf("%s | %w", "Functions", err) } case "max_boost": @@ -105,7 +106,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxBoost", err) } f := Float64(value) s.MaxBoost = &f @@ -121,7 +122,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinScore", err) } f := Float64(value) s.MinScore = &f @@ -132,13 +133,13 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -149,7 +150,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { case "score_mode": if err := dec.Decode(&s.ScoreMode); err != nil { - return err + return fmt.Errorf("%s | %w", "ScoreMode", err) } } diff --git a/typedapi/types/fuzziness.go b/typedapi/types/fuzziness.go index d623a4ebca..a7980466a6 100644 --- a/typedapi/types/fuzziness.go +++ b/typedapi/types/fuzziness.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // int // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L133-L134 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L133-L134 type Fuzziness interface{} diff --git a/typedapi/types/fuzzyquery.go b/typedapi/types/fuzzyquery.go index d956f8b672..b65264e3e5 100644 --- a/typedapi/types/fuzzyquery.go +++ b/typedapi/types/fuzzyquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // FuzzyQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L43-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L43-L78 type FuzzyQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -85,7 +86,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -96,7 +97,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "max_expansions": @@ -107,7 +108,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxExpansions", err) } s.MaxExpansions = &value case float64: @@ -123,7 +124,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -134,7 +135,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case "rewrite": if err := dec.Decode(&s.Rewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "Rewrite", err) } case "transpositions": @@ -155,7 +156,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Transpositions", err) } s.Transpositions = &value case bool: @@ -165,7 +166,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/garbagecollector.go b/typedapi/types/garbagecollector.go index d5e5bf880c..5bce1db5dc 100644 --- a/typedapi/types/garbagecollector.go +++ b/typedapi/types/garbagecollector.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // GarbageCollector type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L923-L928 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L923-L928 type GarbageCollector struct { // Collectors Contains statistics about JVM garbage collectors for the node. 
Collectors map[string]GarbageCollectorTotal `json:"collectors,omitempty"` diff --git a/typedapi/types/garbagecollectortotal.go b/typedapi/types/garbagecollectortotal.go index 37227fb75c..57ba717eaa 100644 --- a/typedapi/types/garbagecollectortotal.go +++ b/typedapi/types/garbagecollectortotal.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GarbageCollectorTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L930-L943 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L930-L943 type GarbageCollectorTotal struct { // CollectionCount Total number of JVM garbage collectors that collect objects. CollectionCount *int64 `json:"collection_count,omitempty"` @@ -62,7 +63,7 @@ func (s *GarbageCollectorTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollectionCount", err) } s.CollectionCount = &value case float64: @@ -73,7 +74,7 @@ func (s *GarbageCollectorTotal) UnmarshalJSON(data []byte) error { case "collection_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CollectionTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,7 +90,7 @@ func (s *GarbageCollectorTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollectionTimeInMillis", err) } s.CollectionTimeInMillis = &value case float64: diff --git a/typedapi/types/gcsrepository.go b/typedapi/types/gcsrepository.go new file mode 100644 index 0000000000..1ab3682239 --- /dev/null +++ b/typedapi/types/gcsrepository.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// GcsRepository type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L45-L48 +type GcsRepository struct { + Settings GcsRepositorySettings `json:"settings"` + Type string `json:"type,omitempty"` + Uuid *string `json:"uuid,omitempty"` +} + +func (s *GcsRepository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return fmt.Errorf("%s | %w", "Settings", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return fmt.Errorf("%s | %w", "Uuid", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s GcsRepository) MarshalJSON() ([]byte, error) { + type innerGcsRepository GcsRepository + tmp := innerGcsRepository{ + Settings: s.Settings, + Type: s.Type, + Uuid: s.Uuid, + } + + tmp.Type = "gcs" + + return json.Marshal(tmp) +} + +// NewGcsRepository returns a GcsRepository. +func NewGcsRepository() *GcsRepository { + r := &GcsRepository{} + + return r +} diff --git a/typedapi/types/gcsrepositorysettings.go b/typedapi/types/gcsrepositorysettings.go new file mode 100644 index 0000000000..8eb6d5b1be --- /dev/null +++ b/typedapi/types/gcsrepositorysettings.go @@ -0,0 +1,163 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// GcsRepositorySettings type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L85-L91 +type GcsRepositorySettings struct { + ApplicationName *string `json:"application_name,omitempty"` + BasePath *string `json:"base_path,omitempty"` + Bucket string `json:"bucket"` + ChunkSize ByteSize `json:"chunk_size,omitempty"` + Client *string `json:"client,omitempty"` + Compress *bool `json:"compress,omitempty"` + MaxRestoreBytesPerSec ByteSize `json:"max_restore_bytes_per_sec,omitempty"` + MaxSnapshotBytesPerSec ByteSize `json:"max_snapshot_bytes_per_sec,omitempty"` + Readonly *bool `json:"readonly,omitempty"` +} + +func (s *GcsRepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "application_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "ApplicationName", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.ApplicationName = &o + + case "base_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "BasePath", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.BasePath = &o + + case "bucket": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Bucket", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Bucket = o + + case "chunk_size": + if err := dec.Decode(&s.ChunkSize); err != nil { + return fmt.Errorf("%s | %w", "ChunkSize", err) + } + + case "client": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Client", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Client = &o + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Compress", err) + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "max_restore_bytes_per_sec": + if err := dec.Decode(&s.MaxRestoreBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxRestoreBytesPerSec", err) + } + + case "max_snapshot_bytes_per_sec": + if err := dec.Decode(&s.MaxSnapshotBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxSnapshotBytesPerSec", err) + } + + case "readonly": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Readonly", err) + } + s.Readonly = &value + case bool: + s.Readonly = &v + } + + } + } + return nil +} + +// NewGcsRepositorySettings returns a GcsRepositorySettings. +func NewGcsRepositorySettings() *GcsRepositorySettings { + r := &GcsRepositorySettings{} + + return r +} diff --git a/typedapi/types/geoboundingboxquery.go b/typedapi/types/geoboundingboxquery.go index 3904c19ed2..12b29783c0 100644 --- a/typedapi/types/geoboundingboxquery.go +++ b/typedapi/types/geoboundingboxquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -34,7 +34,7 @@ import ( // GeoBoundingBoxQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L32-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L32-L50 type GeoBoundingBoxQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -77,7 +77,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -91,7 +91,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { s.GeoBoundingBoxQuery = make(map[string]GeoBounds, 0) } if err := dec.Decode(&s.GeoBoundingBoxQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoBoundingBoxQuery", err) } case "ignore_unmapped": @@ -101,7 +101,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -111,7 +111,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -122,12 +122,12 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "validation_method": if err := dec.Decode(&s.ValidationMethod); err != nil { - return err + return fmt.Errorf("%s | %w", "ValidationMethod", err) } default: diff --git a/typedapi/types/geobounds.go b/typedapi/types/geobounds.go index e8e796b498..02ba20ac0d 100644 --- a/typedapi/types/geobounds.go +++ b/typedapi/types/geobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -27,5 +27,5 @@ package types // TopRightBottomLeftGeoBounds // WktGeoBounds // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L135-L148 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L135-L148 type GeoBounds interface{} diff --git a/typedapi/types/geoboundsaggregate.go b/typedapi/types/geoboundsaggregate.go index f5ea696457..1b6ecf0423 100644 --- a/typedapi/types/geoboundsaggregate.go +++ b/typedapi/types/geoboundsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoBoundsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L303-L306 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L303-L306 type GeoBoundsAggregate struct { Bounds GeoBounds `json:"bounds,omitempty"` Meta Metadata `json:"meta,omitempty"` @@ -52,12 +53,12 @@ func (s *GeoBoundsAggregate) UnmarshalJSON(data []byte) error { case "bounds": if err := dec.Decode(&s.Bounds); err != nil { - return err + return fmt.Errorf("%s | %w", "Bounds", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/geoboundsaggregation.go b/typedapi/types/geoboundsaggregation.go index 345fcb2a02..389f9a5a6c 100644 --- a/typedapi/types/geoboundsaggregation.go +++ b/typedapi/types/geoboundsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoBoundsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L108-L114 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L108-L114 type GeoBoundsAggregation struct { // Field The field on which to run the aggregation. 
Field *string `json:"field,omitempty"` @@ -60,18 +61,18 @@ func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -80,7 +81,7 @@ func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -89,7 +90,7 @@ func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -97,7 +98,7 @@ func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -111,7 +112,7 @@ func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WrapLongitude", err) } s.WrapLongitude = &value case bool: diff --git a/typedapi/types/geocentroidaggregate.go b/typedapi/types/geocentroidaggregate.go index 13ff2b0a12..edd3e08f66 100644 --- a/typedapi/types/geocentroidaggregate.go +++ b/typedapi/types/geocentroidaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoCentroidAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L308-L312 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L308-L312 type GeoCentroidAggregate struct { Count int64 `json:"count"` Location GeoLocation `json:"location,omitempty"` @@ -59,7 +60,7 @@ func (s *GeoCentroidAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -69,12 +70,12 @@ func (s *GeoCentroidAggregate) UnmarshalJSON(data []byte) error { case "location": if err := dec.Decode(&s.Location); err != nil { - return err + return fmt.Errorf("%s | %w", "Location", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/geocentroidaggregation.go b/typedapi/types/geocentroidaggregation.go index 38bfe55c6e..75f8a7684e 100644 --- a/typedapi/types/geocentroidaggregation.go +++ b/typedapi/types/geocentroidaggregation.go @@ -16,7 +16,7 @@ // under the License. 
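Note: the script field on GeoBoundsAggregation (and on the other metric aggregations below) is a union; the decoder re-reads the raw object and picks *InlineScript when it sees a "source" key, or *StoredScriptId when it sees "id". A small sketch, again assuming the go-elasticsearch v8 types import path and hand-written sample JSON:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed module path
)

func main() {
	var agg types.GeoBoundsAggregation

	// A "source" key selects the inline-script variant...
	if err := json.Unmarshal([]byte(`{"script":{"source":"doc['pin'].value"}}`), &agg); err != nil {
		fmt.Println("decode failed:", err)
	}
	fmt.Printf("%T\n", agg.Script) // *types.InlineScript

	// ...while an "id" key selects the stored-script reference.
	if err := json.Unmarshal([]byte(`{"script":{"id":"my-stored-script"}}`), &agg); err != nil {
		fmt.Println("decode failed:", err)
	}
	fmt.Printf("%T\n", agg.Script) // *types.StoredScriptId
}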
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoCentroidAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L116-L119 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L116-L119 type GeoCentroidAggregation struct { Count *int64 `json:"count,omitempty"` // Field The field on which to run the aggregation. @@ -64,7 +65,7 @@ func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -74,23 +75,23 @@ func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "location": if err := dec.Decode(&s.Location); err != nil { - return err + return fmt.Errorf("%s | %w", "Location", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -99,7 +100,7 @@ func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -108,7 +109,7 @@ func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -116,7 +117,7 @@ func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/geodecayfunction.go b/typedapi/types/geodecayfunction.go index 3f2cdb0733..09fafc8b59 100644 --- a/typedapi/types/geodecayfunction.go +++ b/typedapi/types/geodecayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,7 +29,7 @@ import ( // GeoDecayFunction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L190-L192 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L190-L192 type GeoDecayFunction struct { GeoDecayFunction map[string]DecayPlacementGeoLocationDistance `json:"GeoDecayFunction,omitempty"` // MultiValueMode Determines how the distance is calculated when a field used for computing the diff --git a/typedapi/types/geodistanceaggregate.go b/typedapi/types/geodistanceaggregate.go index 3cb90355d2..f34d60820b 100644 --- a/typedapi/types/geodistanceaggregate.go +++ b/typedapi/types/geodistanceaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoDistanceAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L550-L554 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L550-L554 type GeoDistanceAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *GeoDistanceAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]RangeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []RangeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/geodistanceaggregation.go b/typedapi/types/geodistanceaggregation.go index 61850691ea..eb54a853ce 100644 --- a/typedapi/types/geodistanceaggregation.go +++ b/typedapi/types/geodistanceaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // GeoDistanceAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L380-L403 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L380-L403 type GeoDistanceAggregation struct { // DistanceType The distance calculation type. 
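Note: GeoDistanceAggregate.Buckets is a Buckets union. The decoder peeks at the first byte of the raw value and produces either a []RangeBucket (the default array response) or a map[string]RangeBucket (the keyed form returned when the request sets "keyed": true). A sketch under the same import-path assumption, with hand-written sample responses:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed module path
)

func main() {
	var arrayForm, keyedForm types.GeoDistanceAggregate

	// Default shape: buckets arrive as a JSON array.
	if err := json.Unmarshal([]byte(`{"buckets":[{"doc_count":3,"from":0,"to":100}]}`), &arrayForm); err != nil {
		fmt.Println("decode failed:", err)
	}
	fmt.Printf("%T\n", arrayForm.Buckets) // []types.RangeBucket

	// Keyed shape: buckets arrive as an object keyed by range label.
	if err := json.Unmarshal([]byte(`{"buckets":{"*-100.0":{"doc_count":3,"from":0,"to":100}}}`), &keyedForm); err != nil {
		fmt.Println("decode failed:", err)
	}
	fmt.Printf("%T\n", keyedForm.Buckets) // map[string]types.RangeBucket
}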
DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` @@ -66,23 +67,23 @@ func (s *GeoDistanceAggregation) UnmarshalJSON(data []byte) error { case "distance_type": if err := dec.Decode(&s.DistanceType); err != nil { - return err + return fmt.Errorf("%s | %w", "DistanceType", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,17 +94,17 @@ func (s *GeoDistanceAggregation) UnmarshalJSON(data []byte) error { case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "ranges": if err := dec.Decode(&s.Ranges); err != nil { - return err + return fmt.Errorf("%s | %w", "Ranges", err) } case "unit": if err := dec.Decode(&s.Unit); err != nil { - return err + return fmt.Errorf("%s | %w", "Unit", err) } } diff --git a/typedapi/types/geodistancefeaturequery.go b/typedapi/types/geodistancefeaturequery.go index afaba9f65c..f28613f7fb 100644 --- a/typedapi/types/geodistancefeaturequery.go +++ b/typedapi/types/geodistancefeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoDistanceFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L62-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L62-L65 type GeoDistanceFeatureQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -83,7 +84,7 @@ func (s *GeoDistanceFeatureQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,23 +95,23 @@ func (s *GeoDistanceFeatureQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "origin": if err := dec.Decode(&s.Origin); err != nil { - return err + return fmt.Errorf("%s | %w", "Origin", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/geodistancequery.go b/typedapi/types/geodistancequery.go index 8a106c70b0..4cb24a1682 100644 --- a/typedapi/types/geodistancequery.go +++ b/typedapi/types/geodistancequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -34,7 +34,7 @@ import ( // GeoDistanceQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L57-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L57-L85 type GeoDistanceQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -83,7 +83,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,12 +94,12 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { case "distance": if err := dec.Decode(&s.Distance); err != nil { - return err + return fmt.Errorf("%s | %w", "Distance", err) } case "distance_type": if err := dec.Decode(&s.DistanceType); err != nil { - return err + return fmt.Errorf("%s | %w", "DistanceType", err) } case "GeoDistanceQuery": @@ -107,7 +107,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { s.GeoDistanceQuery = make(map[string]GeoLocation, 0) } if err := dec.Decode(&s.GeoDistanceQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoDistanceQuery", err) } case "ignore_unmapped": @@ -117,7 +117,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -127,7 +127,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,7 +138,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { case "validation_method": if err := dec.Decode(&s.ValidationMethod); err != nil { - return err + return fmt.Errorf("%s | %w", "ValidationMethod", err) } default: diff --git a/typedapi/types/geodistancesort.go b/typedapi/types/geodistancesort.go index 465c94f92e..16955272c8 100644 --- a/typedapi/types/geodistancesort.go +++ b/typedapi/types/geodistancesort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -36,7 +36,7 @@ import ( // GeoDistanceSort type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L58-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L58-L66 type GeoDistanceSort struct { DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` GeoDistanceSort map[string][]GeoLocation `json:"GeoDistanceSort,omitempty"` @@ -63,7 +63,7 @@ func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { case "distance_type": if err := dec.Decode(&s.DistanceType); err != nil { - return err + return fmt.Errorf("%s | %w", "DistanceType", err) } case "GeoDistanceSort": @@ -78,14 +78,14 @@ func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { o := new(GeoLocation) err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "GeoDistanceSort", err) } s.GeoDistanceSort[key] = append(s.GeoDistanceSort[key], o) default: o := []GeoLocation{} err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "GeoDistanceSort", err) } s.GeoDistanceSort[key] = o } @@ -98,7 +98,7 @@ func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -107,17 +107,17 @@ func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "unit": if err := dec.Decode(&s.Unit); err != nil { - return err + return fmt.Errorf("%s | %w", "Unit", err) } default: diff --git a/typedapi/types/geohashgridaggregate.go b/typedapi/types/geohashgridaggregate.go index d8f21e04ba..f67009e95f 100644 --- a/typedapi/types/geohashgridaggregate.go +++ b/typedapi/types/geohashgridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoHashGridAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L506-L508 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L506-L508 type GeoHashGridAggregate struct { Buckets BucketsGeoHashGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *GeoHashGridAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]GeoHashGridBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []GeoHashGridBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/geohashgridaggregation.go b/typedapi/types/geohashgridaggregation.go index 4d398d3be1..7943059e81 100644 --- a/typedapi/types/geohashgridaggregation.go +++ b/typedapi/types/geohashgridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoHashGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L405-L430 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L405-L430 type GeoHashGridAggregation struct { // Bounds The bounding box to filter the points in each bucket. 
Bounds GeoBounds `json:"bounds,omitempty"` @@ -68,23 +69,23 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { case "bounds": if err := dec.Decode(&s.Bounds); err != nil { - return err + return fmt.Errorf("%s | %w", "Bounds", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { case "precision": if err := dec.Decode(&s.Precision); err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } case "shard_size": @@ -106,7 +107,7 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -122,7 +123,7 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/geohashgridbucket.go b/typedapi/types/geohashgridbucket.go index 38499e0636..4889dec38d 100644 --- a/typedapi/types/geohashgridbucket.go +++ b/typedapi/types/geohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // GeoHashGridBucket type. 
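Note: the numeric settings on GeoHashGridAggregation (precision, shard_size, size) are decoded leniently: a JSON number or a quoted string are both accepted, with strings going through strconv.Atoi. A short sketch, same import-path assumption, with sample values invented for illustration:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed module path
)

func main() {
	// "size" is a quoted string here; the generated UnmarshalJSON converts
	// it with strconv.Atoi rather than failing.
	payload := []byte(`{"field":"location","precision":5,"size":"100","shard_size":3}`)

	var agg types.GeoHashGridAggregation
	if err := json.Unmarshal(payload, &agg); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(*agg.Field, *agg.Size, *agg.ShardSize) // location 100 3
}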
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L510-L512 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L510-L512 type GeoHashGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } default: @@ -88,490 +88,490 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = 
o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/geohashlocation.go b/typedapi/types/geohashlocation.go index 24c15e12cc..3eb25f351f 100644 --- a/typedapi/types/geohashlocation.go +++ b/typedapi/types/geohashlocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoHashLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L131-L133 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L131-L133 type GeoHashLocation struct { Geohash string `json:"geohash"` } @@ -51,7 +52,7 @@ func (s *GeoHashLocation) UnmarshalJSON(data []byte) error { case "geohash": if err := dec.Decode(&s.Geohash); err != nil { - return err + return fmt.Errorf("%s | %w", "Geohash", err) } } diff --git a/typedapi/types/geohashprecision.go b/typedapi/types/geohashprecision.go index 0af807defd..672ab17e39 100644 --- a/typedapi/types/geohashprecision.go +++ b/typedapi/types/geohashprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L86-L90 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L86-L90 type GeoHashPrecision interface{} diff --git a/typedapi/types/geohexgridaggregate.go b/typedapi/types/geohexgridaggregate.go index 427f285e56..2fcf10c74a 100644 --- a/typedapi/types/geohexgridaggregate.go +++ b/typedapi/types/geohexgridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoHexGridAggregate type. 
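Note: GeoHashGridBucket (like GeoHexGridBucket further down) keeps its sub-aggregations in a map[string]Aggregate. Response keys are expected in the typed_keys form "type#name"; the bucket's UnmarshalJSON splits on "#", uses the prefix to choose the concrete aggregate type, and stores the result under the bare name. A rough sketch, assuming the same types import path and a hand-written typed_keys-style payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed module path
)

func main() {
	// "sterms#tags" says: decode this sub-aggregation as a string-terms
	// aggregate and expose it as Aggregations["tags"].
	payload := []byte(`{
		"doc_count": 1,
		"key": "u0",
		"sterms#tags": {"doc_count_error_upper_bound": 0, "sum_other_doc_count": 0, "buckets": []}
	}`)

	var bucket types.GeoHashGridBucket
	if err := json.Unmarshal(payload, &bucket); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("%T\n", bucket.Aggregations["tags"]) // *types.StringTermsAggregate
}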
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L522-L523 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L522-L523 type GeoHexGridAggregate struct { Buckets BucketsGeoHexGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *GeoHexGridAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]GeoHexGridBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []GeoHexGridBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/geohexgridaggregation.go b/typedapi/types/geohexgridaggregation.go index c0b329d2e4..9b0ac5c585 100644 --- a/typedapi/types/geohexgridaggregation.go +++ b/typedapi/types/geohexgridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeohexGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L460-L485 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L460-L485 type GeohexGridAggregation struct { // Bounds Bounding box used to filter the geo-points in each bucket. 
Bounds GeoBounds `json:"bounds,omitempty"` @@ -65,23 +66,23 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { case "bounds": if err := dec.Decode(&s.Bounds); err != nil { - return err + return fmt.Errorf("%s | %w", "Bounds", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +99,7 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } s.Precision = &value case float64: @@ -114,7 +115,7 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -130,7 +131,7 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/geohexgridbucket.go b/typedapi/types/geohexgridbucket.go index 488967a588..6bee77ed19 100644 --- a/typedapi/types/geohexgridbucket.go +++ b/typedapi/types/geohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // GeoHexGridBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L525-L527 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L525-L527 type GeoHexGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } default: @@ -88,490 +88,490 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | 
%w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/geoipdownloadstatistics.go b/typedapi/types/geoipdownloadstatistics.go index 2d3695d261..885f0ac08c 100644 --- a/typedapi/types/geoipdownloadstatistics.go +++ b/typedapi/types/geoipdownloadstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoIpDownloadStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/geo_ip_stats/types.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/geo_ip_stats/types.ts#L24-L35 type GeoIpDownloadStatistics struct { // DatabaseCount Current number of databases available for use. DatabaseCount int `json:"database_count"` @@ -67,7 +68,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DatabaseCount", err) } s.DatabaseCount = value case float64: @@ -83,7 +84,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedDownloads", err) } s.FailedDownloads = value case float64: @@ -99,7 +100,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SkippedUpdates", err) } s.SkippedUpdates = value case float64: @@ -115,7 +116,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SuccessfulDownloads", err) } s.SuccessfulDownloads = value case float64: @@ -125,7 +126,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case "total_download_time": if err := dec.Decode(&s.TotalDownloadTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDownloadTime", err) } } diff --git a/typedapi/types/geoipnodedatabasename.go b/typedapi/types/geoipnodedatabasename.go index b8e89b9299..7dae963286 100644 --- a/typedapi/types/geoipnodedatabasename.go +++ b/typedapi/types/geoipnodedatabasename.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoIpNodeDatabaseName type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/geo_ip_stats/types.ts#L45-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/geo_ip_stats/types.ts#L45-L48 type GeoIpNodeDatabaseName struct { // Name Name of the database. 
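The hunks above (and throughout this diff) replace bare "return err" statements in the generated UnmarshalJSON methods with fmt.Errorf("%s | %w", "<FieldName>", err), so a failed decode names the offending field while leaving the original cause unwrappable. A minimal, self-contained sketch of that pattern follows; the sketchStats type is a hypothetical stand-in, not part of the generated code.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

type sketchStats struct {
	DatabaseCount int
}

func (s *sketchStats) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	v, ok := raw["database_count"]
	if !ok {
		return nil
	}
	// Accept either a JSON number or a quoted number, as the generated code does.
	var asString string
	if err := json.Unmarshal(v, &asString); err == nil {
		n, convErr := strconv.Atoi(asString)
		if convErr != nil {
			// Same shape as the generated code: field name, then the wrapped cause.
			return fmt.Errorf("%s | %w", "DatabaseCount", convErr)
		}
		s.DatabaseCount = n
		return nil
	}
	if err := json.Unmarshal(v, &s.DatabaseCount); err != nil {
		return fmt.Errorf("%s | %w", "DatabaseCount", err)
	}
	return nil
}

func main() {
	var s sketchStats
	err := json.Unmarshal([]byte(`{"database_count":"oops"}`), &s)
	fmt.Println(err) // DatabaseCount | strconv.Atoi: parsing "oops": invalid syntax

	// Because the cause is wrapped with %w, callers can still reach it.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true
}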
Name string `json:"name"` @@ -52,7 +53,7 @@ func (s *GeoIpNodeDatabaseName) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/geoipnodedatabases.go b/typedapi/types/geoipnodedatabases.go index f571d8823d..d674161aa6 100644 --- a/typedapi/types/geoipnodedatabases.go +++ b/typedapi/types/geoipnodedatabases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // GeoIpNodeDatabases type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/geo_ip_stats/types.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/geo_ip_stats/types.ts#L37-L43 type GeoIpNodeDatabases struct { // Databases Downloaded databases for the node. Databases []GeoIpNodeDatabaseName `json:"databases"` diff --git a/typedapi/types/geoipprocessor.go b/typedapi/types/geoipprocessor.go index 9740b16d9f..5abd9b0008 100644 --- a/typedapi/types/geoipprocessor.go +++ b/typedapi/types/geoipprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoIpProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L339-L368 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L339-L368 type GeoIpProcessor struct { // DatabaseFile The database filename referring to a database the module ships with // (GeoLite2-City.mmdb, GeoLite2-Country.mmdb, or GeoLite2-ASN.mmdb) or a custom @@ -82,7 +83,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case "database_file": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DatabaseFile", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,7 +95,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "first_only": @@ -115,7 +116,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FirstOnly", err) } s.FirstOnly = &value case bool: @@ -125,7 +126,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -141,7 +142,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -155,7 +156,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -164,18 +165,18 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "properties": if err := dec.Decode(&s.Properties); err != nil { - return err + return fmt.Errorf("%s | %w", "Properties", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -186,7 +187,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/geoline.go b/typedapi/types/geoline.go index 611c005a1c..eb6320eaa9 100644 --- a/typedapi/types/geoline.go +++ b/typedapi/types/geoline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoLine type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L56-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L56-L62 type GeoLine struct { // Coordinates Array of `[lon, lat]` coordinates Coordinates [][]Float64 `json:"coordinates"` @@ -55,13 +56,13 @@ func (s *GeoLine) UnmarshalJSON(data []byte) error { case "coordinates": if err := dec.Decode(&s.Coordinates); err != nil { - return err + return fmt.Errorf("%s | %w", "Coordinates", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/geolineaggregate.go b/typedapi/types/geolineaggregate.go index af304abad1..e9f482ef2e 100644 --- a/typedapi/types/geolineaggregate.go +++ b/typedapi/types/geolineaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoLineAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L784-L791 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L784-L791 type GeoLineAggregate struct { Geometry GeoLine `json:"geometry"` Meta Metadata `json:"meta,omitempty"` @@ -55,23 +56,23 @@ func (s *GeoLineAggregate) UnmarshalJSON(data []byte) error { case "geometry": if err := dec.Decode(&s.Geometry); err != nil { - return err + return fmt.Errorf("%s | %w", "Geometry", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": if err := dec.Decode(&s.Properties); err != nil { - return err + return fmt.Errorf("%s | %w", "Properties", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/geolineaggregation.go b/typedapi/types/geolineaggregation.go index 6a1b87e8ea..d522c02bb3 100644 --- a/typedapi/types/geolineaggregation.go +++ b/typedapi/types/geolineaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // GeoLineAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L121-L146 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L121-L146 type GeoLineAggregation struct { // IncludeSort When `true`, returns an additional array of the sort values in the feature // properties. @@ -73,7 +74,7 @@ func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeSort", err) } s.IncludeSort = &value case bool: @@ -82,7 +83,7 @@ func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { case "point": if err := dec.Decode(&s.Point); err != nil { - return err + return fmt.Errorf("%s | %w", "Point", err) } case "size": @@ -93,7 +94,7 @@ func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -103,12 +104,12 @@ func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "sort_order": if err := dec.Decode(&s.SortOrder); err != nil { - return err + return fmt.Errorf("%s | %w", "SortOrder", err) } } diff --git a/typedapi/types/geolinepoint.go b/typedapi/types/geolinepoint.go index cd753e336f..1e2e243772 100644 --- a/typedapi/types/geolinepoint.go +++ b/typedapi/types/geolinepoint.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoLinePoint type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L155-L160 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L155-L160 type GeoLinePoint struct { // Field The name of the geo_point field. Field string `json:"field"` @@ -52,7 +53,7 @@ func (s *GeoLinePoint) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } } diff --git a/typedapi/types/geolinesort.go b/typedapi/types/geolinesort.go index be89460ac1..d689952ede 100644 --- a/typedapi/types/geolinesort.go +++ b/typedapi/types/geolinesort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoLineSort type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L148-L153 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L148-L153 type GeoLineSort struct { // Field The name of the numeric field to use as the sort key for ordering the points. Field string `json:"field"` @@ -52,7 +53,7 @@ func (s *GeoLineSort) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } } diff --git a/typedapi/types/geolocation.go b/typedapi/types/geolocation.go index 172ab258bd..5d839889ed 100644 --- a/typedapi/types/geolocation.go +++ b/typedapi/types/geolocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -27,5 +27,5 @@ package types // []Float64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L104-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L104-L118 type GeoLocation interface{} diff --git a/typedapi/types/geopointproperty.go b/typedapi/types/geopointproperty.go index f2f5eafd87..d519588919 100644 --- a/typedapi/types/geopointproperty.go +++ b/typedapi/types/geopointproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,15 +24,17 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/dynamicmapping" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/onscripterror" ) // GeoPointProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/geo.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/geo.ts#L24-L32 type GeoPointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -41,13 +43,16 @@ type GeoPointProperty struct { IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` IgnoreZValue *bool `json:"ignore_z_value,omitempty"` + Index *bool `json:"index,omitempty"` // Meta Metadata about the field. 
- Meta map[string]string `json:"meta,omitempty"` - NullValue GeoLocation `json:"null_value,omitempty"` - Properties map[string]Property `json:"properties,omitempty"` - Similarity *string `json:"similarity,omitempty"` - Store *bool `json:"store,omitempty"` - Type string `json:"type,omitempty"` + Meta map[string]string `json:"meta,omitempty"` + NullValue GeoLocation `json:"null_value,omitempty"` + OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` + Properties map[string]Property `json:"properties,omitempty"` + Script Script `json:"script,omitempty"` + Similarity *string `json:"similarity,omitempty"` + Store *bool `json:"store,omitempty"` + Type string `json:"type,omitempty"` } func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { @@ -71,13 +76,13 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -88,7 +93,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -97,7 +102,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -415,7 +420,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -430,7 +435,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -444,24 +449,43 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreZValue", err) } s.IgnoreZValue = &value case bool: s.IgnoreZValue = &v } + case "index": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Index", err) + } + s.Index = &value + case bool: + s.Index = &v + } + case "meta": if s.Meta == nil { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) + } + + case "on_script_error": + if err := dec.Decode(&s.OnScriptError); err != nil { + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -771,10 +795,46 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } } + case "script": + message := json.RawMessage{} + if err := dec.Decode(&message); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + keyDec := json.NewDecoder(bytes.NewReader(message)) + for { + t, err := keyDec.Token() + if err != nil { + if errors.Is(err, 
io.EOF) { + break + } + return fmt.Errorf("%s | %w", "Script", err) + } + + switch t { + + case "lang", "options", "source": + o := NewInlineScript() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + case "id": + o := NewStoredScriptId() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + } + } + case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -790,7 +850,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -799,7 +859,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } @@ -818,9 +878,12 @@ func (s GeoPointProperty) MarshalJSON() ([]byte, error) { IgnoreAbove: s.IgnoreAbove, IgnoreMalformed: s.IgnoreMalformed, IgnoreZValue: s.IgnoreZValue, + Index: s.Index, Meta: s.Meta, NullValue: s.NullValue, + OnScriptError: s.OnScriptError, Properties: s.Properties, + Script: s.Script, Similarity: s.Similarity, Store: s.Store, Type: s.Type, diff --git a/typedapi/types/geopolygonpoints.go b/typedapi/types/geopolygonpoints.go index 6f87dee330..2d7e9457d1 100644 --- a/typedapi/types/geopolygonpoints.go +++ b/typedapi/types/geopolygonpoints.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // GeoPolygonPoints type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L87-L89 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L87-L89 type GeoPolygonPoints struct { Points []GeoLocation `json:"points"` } diff --git a/typedapi/types/geopolygonquery.go b/typedapi/types/geopolygonquery.go index fa51cdd46c..ef1f58c5f3 100644 --- a/typedapi/types/geopolygonquery.go +++ b/typedapi/types/geopolygonquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -33,7 +33,7 @@ import ( // GeoPolygonQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L91-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L91-L99 type GeoPolygonQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
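The GeoPointProperty changes above also add a script field whose value is decoded by sniffing the raw JSON keys: a source, lang, or options key selects an inline script, an id key a stored-script reference. A simplified sketch of that dispatch, using hypothetical local types in place of the generated InlineScript and StoredScriptId:

package main

import (
	"encoding/json"
	"fmt"
)

type inlineScript struct {
	Source string `json:"source"`
	Lang   string `json:"lang,omitempty"`
}

type storedScriptID struct {
	ID string `json:"id"`
}

// decodeScript returns either an inlineScript or a storedScriptID,
// mirroring the key-based dispatch in the generated code.
func decodeScript(raw json.RawMessage) (interface{}, error) {
	var keys map[string]json.RawMessage
	if err := json.Unmarshal(raw, &keys); err != nil {
		return nil, fmt.Errorf("%s | %w", "Script", err)
	}
	if _, ok := keys["id"]; ok {
		var o storedScriptID
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Script", err)
		}
		return o, nil
	}
	var o inlineScript
	if err := json.Unmarshal(raw, &o); err != nil {
		return nil, fmt.Errorf("%s | %w", "Script", err)
	}
	return o, nil
}

func main() {
	inline, _ := decodeScript(json.RawMessage(`{"source":"doc['location'].lat","lang":"painless"}`))
	stored, _ := decodeScript(json.RawMessage(`{"id":"my-stored-script"}`))
	fmt.Printf("%#v\n%#v\n", inline, stored)
}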
@@ -69,7 +69,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -83,7 +83,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { s.GeoPolygonQuery = make(map[string]GeoPolygonPoints, 0) } if err := dec.Decode(&s.GeoPolygonQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoPolygonQuery", err) } case "ignore_unmapped": @@ -93,7 +93,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -103,7 +103,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -114,7 +114,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { case "validation_method": if err := dec.Decode(&s.ValidationMethod); err != nil { - return err + return fmt.Errorf("%s | %w", "ValidationMethod", err) } default: diff --git a/typedapi/types/georesults.go b/typedapi/types/georesults.go index 311cccef06..226ca09818 100644 --- a/typedapi/types/georesults.go +++ b/typedapi/types/georesults.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoResults type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Anomaly.ts#L145-L154 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Anomaly.ts#L145-L154 type GeoResults struct { // ActualPoint The actual value for the bucket formatted as a `geo_point`. ActualPoint string `json:"actual_point"` @@ -56,7 +57,7 @@ func (s *GeoResults) UnmarshalJSON(data []byte) error { case "actual_point": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ActualPoint", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *GeoResults) UnmarshalJSON(data []byte) error { case "typical_point": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TypicalPoint", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/geoshapefieldquery.go b/typedapi/types/geoshapefieldquery.go index 9dd973572a..1d3f251c55 100644 --- a/typedapi/types/geoshapefieldquery.go +++ b/typedapi/types/geoshapefieldquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geoshaperelation" @@ -31,7 +32,7 @@ import ( // GeoShapeFieldQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L106-L117 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L106-L117 type GeoShapeFieldQuery struct { // IndexedShape Query using an indexed shape retrieved from the the specified document and // path. @@ -58,17 +59,17 @@ func (s *GeoShapeFieldQuery) UnmarshalJSON(data []byte) error { case "indexed_shape": if err := dec.Decode(&s.IndexedShape); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedShape", err) } case "relation": if err := dec.Decode(&s.Relation); err != nil { - return err + return fmt.Errorf("%s | %w", "Relation", err) } case "shape": if err := dec.Decode(&s.Shape); err != nil { - return err + return fmt.Errorf("%s | %w", "Shape", err) } } diff --git a/typedapi/types/geoshapeproperty.go b/typedapi/types/geoshapeproperty.go index 43fbb574dc..8af3b3e079 100644 --- a/typedapi/types/geoshapeproperty.go +++ b/typedapi/types/geoshapeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // GeoShapeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/geo.ts#L37-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/geo.ts#L41-L54 type GeoShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -76,7 +77,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -89,13 +90,13 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -106,7 +107,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -115,7 +116,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -433,7 +434,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -448,7 +449,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -462,7 +463,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreZValue", err) } s.IgnoreZValue = &value case bool: @@ -474,12 +475,12 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "orientation": if err := dec.Decode(&s.Orientation); err != nil { - return err + return fmt.Errorf("%s | %w", "Orientation", err) } case "properties": @@ -792,7 +793,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -808,7 +809,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -817,12 +818,12 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case "strategy": if err := dec.Decode(&s.Strategy); err != nil { - return err + return fmt.Errorf("%s | %w", "Strategy", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff 
--git a/typedapi/types/geoshapequery.go b/typedapi/types/geoshapequery.go index 064ba5f621..0377d06c00 100644 --- a/typedapi/types/geoshapequery.go +++ b/typedapi/types/geoshapequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // GeoShapeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/geo.ts#L121-L131 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/geo.ts#L121-L131 type GeoShapeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -69,7 +69,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -83,7 +83,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { s.GeoShapeQuery = make(map[string]GeoShapeFieldQuery, 0) } if err := dec.Decode(&s.GeoShapeQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoShapeQuery", err) } case "ignore_unmapped": @@ -93,7 +93,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -103,7 +103,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/geotilegridaggregate.go b/typedapi/types/geotilegridaggregate.go index 670fa98198..667879d467 100644 --- a/typedapi/types/geotilegridaggregate.go +++ b/typedapi/types/geotilegridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GeoTileGridAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L514-L516 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L514-L516 type GeoTileGridAggregate struct { Buckets BucketsGeoTileGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *GeoTileGridAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]GeoTileGridBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []GeoTileGridBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/geotilegridaggregation.go b/typedapi/types/geotilegridaggregation.go index fea7601737..b1fde406ac 100644 --- a/typedapi/types/geotilegridaggregation.go +++ b/typedapi/types/geotilegridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GeoTileGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L432-L458 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L432-L458 type GeoTileGridAggregation struct { // Bounds A bounding box to filter the geo-points or geo-shapes in each bucket. 
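The GeoTileGridAggregate hunk above decodes the buckets union by checking the first JSON delimiter: an opening brace means a keyed object of buckets, an opening bracket an array. A compact sketch of the same idea with a hypothetical bucket type (the generated code uses a streaming token check rather than a prefix test):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type bucket struct {
	DocCount int64 `json:"doc_count"`
}

// decodeBuckets accepts either {"key": {...}} or [{...}, ...].
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	trimmed := bytes.TrimLeft(raw, " \t\r\n")
	switch {
	case bytes.HasPrefix(trimmed, []byte("{")):
		o := make(map[string]bucket)
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	case bytes.HasPrefix(trimmed, []byte("[")):
		var o []bucket
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	}
	return nil, fmt.Errorf("Buckets | unexpected JSON value")
}

func main() {
	keyed, _ := decodeBuckets(json.RawMessage(`{"13/1202/2077":{"doc_count":3}}`))
	listed, _ := decodeBuckets(json.RawMessage(`[{"doc_count":3},{"doc_count":1}]`))
	fmt.Println(keyed, listed)
}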
Bounds GeoBounds `json:"bounds,omitempty"` @@ -68,23 +69,23 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "bounds": if err := dec.Decode(&s.Bounds); err != nil { - return err + return fmt.Errorf("%s | %w", "Bounds", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "precision": if err := dec.Decode(&s.Precision); err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } case "shard_size": @@ -106,7 +107,7 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -122,7 +123,7 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/geotilegridbucket.go b/typedapi/types/geotilegridbucket.go index 799cde68ad..7f98264887 100644 --- a/typedapi/types/geotilegridbucket.go +++ b/typedapi/types/geotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // GeoTileGridBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L518-L520 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L518-L520 type GeoTileGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } default: @@ -88,490 +88,490 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = 
o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/getmigrationfeature.go b/typedapi/types/getmigrationfeature.go index 705b676e07..7eb67c5405 100644 --- a/typedapi/types/getmigrationfeature.go +++ b/typedapi/types/getmigrationfeature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // GetMigrationFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L37-L42 type GetMigrationFeature struct { FeatureName string `json:"feature_name"` Indices []MigrationFeatureIndexInfo `json:"indices"` @@ -58,7 +59,7 @@ func (s *GetMigrationFeature) UnmarshalJSON(data []byte) error { case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,17 +70,17 @@ func (s *GetMigrationFeature) UnmarshalJSON(data []byte) error { case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "migration_status": if err := dec.Decode(&s.MigrationStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "MigrationStatus", err) } case "minimum_index_version": if err := dec.Decode(&s.MinimumIndexVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumIndexVersion", err) } } diff --git a/typedapi/types/getresult.go b/typedapi/types/getresult.go index e33176a1b5..5fd4bcd528 100644 --- a/typedapi/types/getresult.go +++ b/typedapi/types/getresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GetResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get/types.ts#L25-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get/types.ts#L25-L35 type GetResult struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -63,7 +64,7 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "found": @@ -73,7 +74,7 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Found", err) } s.Found = value case bool: @@ -82,12 +83,12 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "_primary_term": @@ -97,7 +98,7 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = &value case float64: @@ -108,7 +109,7 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { case "_routing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -119,17 +120,17 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { case "_seq_no": if err := dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "_version": if err := dec.Decode(&s.Version_); err != nil { - return err + return fmt.Errorf("%s | %w", "Version_", err) } } diff --git a/typedapi/types/getscriptcontext.go b/typedapi/types/getscriptcontext.go index e468590339..676be035fd 100644 --- a/typedapi/types/getscriptcontext.go +++ b/typedapi/types/getscriptcontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GetScriptContext type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script_context/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script_context/types.ts#L22-L25 type GetScriptContext struct { Methods []ContextMethod `json:"methods"` Name string `json:"name"` @@ -52,12 +53,12 @@ func (s *GetScriptContext) UnmarshalJSON(data []byte) error { case "methods": if err := dec.Decode(&s.Methods); err != nil { - return err + return fmt.Errorf("%s | %w", "Methods", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/getstats.go b/typedapi/types/getstats.go index 8fa083e90a..2a5cc97747 100644 --- a/typedapi/types/getstats.go +++ b/typedapi/types/getstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GetStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L130-L141 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L130-L141 type GetStats struct { Current int64 `json:"current"` ExistsTime Duration `json:"exists_time,omitempty"` @@ -66,7 +67,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } s.Current = value case float64: @@ -76,12 +77,12 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case "exists_time": if err := dec.Decode(&s.ExistsTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ExistsTime", err) } case "exists_time_in_millis": if err := dec.Decode(&s.ExistsTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ExistsTimeInMillis", err) } case "exists_total": @@ -91,7 +92,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExistsTotal", err) } s.ExistsTotal = value case float64: @@ -101,12 +102,12 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case "missing_time": if err := dec.Decode(&s.MissingTime); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingTime", err) } case "missing_time_in_millis": if err := dec.Decode(&s.MissingTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingTimeInMillis", err) } case "missing_total": @@ -116,7 +117,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingTotal", err) } s.MissingTotal = value case float64: @@ -126,12 +127,12 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case "time": if err := dec.Decode(&s.Time); err != nil { - return err + return fmt.Errorf("%s | %w", "Time", err) } case "time_in_millis": if err := dec.Decode(&s.TimeInMillis); err != nil { - 
return err + return fmt.Errorf("%s | %w", "TimeInMillis", err) } case "total": @@ -141,7 +142,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/getuserprofileerrors.go b/typedapi/types/getuserprofileerrors.go index b4b9709895..6ca9ce2361 100644 --- a/typedapi/types/getuserprofileerrors.go +++ b/typedapi/types/getuserprofileerrors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GetUserProfileErrors type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_user_profile/types.ts#L25-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_user_profile/types.ts#L25-L28 type GetUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` @@ -58,7 +59,7 @@ func (s *GetUserProfileErrors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -71,7 +72,7 @@ func (s *GetUserProfileErrors) UnmarshalJSON(data []byte) error { s.Details = make(map[string]ErrorCause, 0) } if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } } diff --git a/typedapi/types/globalaggregate.go b/typedapi/types/globalaggregate.go index 8e3483edd8..0efc43275c 100644 --- a/typedapi/types/globalaggregate.go +++ b/typedapi/types/globalaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // GlobalAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L492-L493 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L492-L493 type GlobalAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | 
%w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/globalaggregation.go b/typedapi/types/globalaggregation.go index ac853ce9a1..822f6e9a50 100644 --- a/typedapi/types/globalaggregation.go +++ b/typedapi/types/globalaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GlobalAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L487-L487 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L487-L487 type GlobalAggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -53,13 +54,13 @@ func (s *GlobalAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/globalprivilege.go b/typedapi/types/globalprivilege.go index 5ce1ba0cfe..410b0ef5c9 100644 --- a/typedapi/types/globalprivilege.go +++ b/typedapi/types/globalprivilege.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // GlobalPrivilege type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L189-L191 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L189-L191 type GlobalPrivilege struct { Application ApplicationGlobalUserPrivileges `json:"application"` } diff --git a/typedapi/types/googlenormalizeddistanceheuristic.go b/typedapi/types/googlenormalizeddistanceheuristic.go index 680265a785..6e1f5164a5 100644 --- a/typedapi/types/googlenormalizeddistanceheuristic.go +++ b/typedapi/types/googlenormalizeddistanceheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GoogleNormalizedDistanceHeuristic type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L746-L751 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L746-L751 type GoogleNormalizedDistanceHeuristic struct { // BackgroundIsSuperset Set to `false` if you defined a custom background filter that represents a // different set of documents that you want to compare to. @@ -59,7 +60,7 @@ func (s *GoogleNormalizedDistanceHeuristic) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundIsSuperset", err) } s.BackgroundIsSuperset = &value case bool: diff --git a/typedapi/types/grantapikey.go b/typedapi/types/grantapikey.go index 0f513c99e4..ae7f577e1e 100644 --- a/typedapi/types/grantapikey.go +++ b/typedapi/types/grantapikey.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // GrantApiKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/grant_api_key/types.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/grant_api_key/types.ts#L25-L46 type GrantApiKey struct { // Expiration Expiration time for the API key. By default, API keys never expire. Expiration *string `json:"expiration,omitempty"` @@ -65,17 +66,17 @@ func (s *GrantApiKey) UnmarshalJSON(data []byte) error { case "expiration": if err := dec.Decode(&s.Expiration); err != nil { - return err + return fmt.Errorf("%s | %w", "Expiration", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "role_descriptors": @@ -88,13 +89,13 @@ func (s *GrantApiKey) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]RoleDescriptor, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleDescriptors", err) } s.RoleDescriptors = append(s.RoleDescriptors, o) case '[': o := make([]map[string]RoleDescriptor, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleDescriptors", err) } s.RoleDescriptors = o } diff --git a/typedapi/types/grokprocessor.go b/typedapi/types/grokprocessor.go index 24b57e0cda..7ff9c7e67f 100644 --- a/typedapi/types/grokprocessor.go +++ b/typedapi/types/grokprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GrokProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L672-L697 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L672-L697 type GrokProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -80,7 +81,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,13 +92,13 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +114,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -127,7 +128,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -136,7 +137,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "pattern_definitions": @@ -144,18 +145,18 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { s.PatternDefinitions = make(map[string]string, 0) } if err := dec.Decode(&s.PatternDefinitions); err != nil { - return err + return fmt.Errorf("%s | %w", "PatternDefinitions", err) } case "patterns": if err := dec.Decode(&s.Patterns); err != nil { - return err + return fmt.Errorf("%s | %w", "Patterns", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -171,7 +172,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TraceMatch", err) } s.TraceMatch = &value case bool: diff --git a/typedapi/types/groupings.go b/typedapi/types/groupings.go index ce3ce03728..a2f15bcc5d 100644 --- a/typedapi/types/groupings.go +++ b/typedapi/types/groupings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Groupings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/_types/Groupings.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/_types/Groupings.ts#L24-L40 type Groupings struct { // DateHistogram A date histogram group aggregates a date field into time-based buckets. // This group is mandatory; you currently cannot roll up documents without a diff --git a/typedapi/types/gsubprocessor.go b/typedapi/types/gsubprocessor.go index 312faf7248..0a249759dd 100644 --- a/typedapi/types/gsubprocessor.go +++ b/typedapi/types/gsubprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // GsubProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L699-L723 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L699-L723 type GsubProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -76,7 +77,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,13 +88,13 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -123,7 +124,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -132,13 +133,13 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -150,7 +151,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case "replacement": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Replacement", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -162,7 +163,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err 
:= dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -173,7 +174,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/halffloatnumberproperty.go b/typedapi/types/halffloatnumberproperty.go index 4e4b7e8c0a..b2d0c0aa76 100644 --- a/typedapi/types/halffloatnumberproperty.go +++ b/typedapi/types/halffloatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // HalfFloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L139-L142 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L139-L142 type HalfFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, 
err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,7 +499,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -508,7 +509,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } f := float32(value) s.NullValue = &f @@ -519,7 +520,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -832,7 +833,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -841,7 +842,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -850,7 +851,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -858,7 +859,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -868,7 +869,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -884,7 +885,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -898,7 +899,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -907,12 +908,12 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/haschildquery.go b/typedapi/types/haschildquery.go index d9eca29334..52c1fc3e59 100644 --- a/typedapi/types/haschildquery.go +++ b/typedapi/types/haschildquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HasChildQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/joining.ts#L41-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/joining.ts#L41-L76 type HasChildQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -89,7 +90,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -105,7 +106,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -114,7 +115,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case "inner_hits": if err := dec.Decode(&s.InnerHits); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } case "max_children": @@ -125,7 +126,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxChildren", err) } s.MaxChildren = &value case float64: @@ -141,7 +142,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinChildren", err) } s.MinChildren = &value case float64: @@ -151,13 +152,13 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -168,12 +169,12 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case "score_mode": if err := dec.Decode(&s.ScoreMode); err != nil { - return err + return fmt.Errorf("%s | %w", "ScoreMode", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/hasparentquery.go b/typedapi/types/hasparentquery.go index 95e9f4f40b..f81edfcdb2 100644 --- a/typedapi/types/hasparentquery.go +++ b/typedapi/types/hasparentquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HasParentQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/joining.ts#L78-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/joining.ts#L78-L104 type HasParentQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -79,7 +80,7 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -95,7 +96,7 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -104,23 +105,23 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { case "inner_hits": if err := dec.Decode(&s.InnerHits); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } case "parent_type": if err := dec.Decode(&s.ParentType); err != nil { - return err + return fmt.Errorf("%s | %w", "ParentType", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -136,7 +137,7 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } s.Score = &value case bool: diff --git a/typedapi/types/hasprivilegesuserprofileerrors.go b/typedapi/types/hasprivilegesuserprofileerrors.go index c65800df6e..24569cdcd5 100644 --- a/typedapi/types/hasprivilegesuserprofileerrors.go +++ b/typedapi/types/hasprivilegesuserprofileerrors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HasPrivilegesUserProfileErrors type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges_user_profile/types.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges_user_profile/types.ts#L39-L42 type HasPrivilegesUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` @@ -58,7 +59,7 @@ func (s *HasPrivilegesUserProfileErrors) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -71,7 +72,7 @@ func (s *HasPrivilegesUserProfileErrors) UnmarshalJSON(data []byte) error { s.Details = make(map[string]ErrorCause, 0) } if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } } diff --git a/typedapi/types/hdrmethod.go b/typedapi/types/hdrmethod.go index 338cc12eb4..f402a675bb 100644 --- a/typedapi/types/hdrmethod.go +++ b/typedapi/types/hdrmethod.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HdrMethod type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L216-L221 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L216-L221 type HdrMethod struct { // NumberOfSignificantValueDigits Specifies the resolution of values for the histogram in number of significant // digits. @@ -60,7 +61,7 @@ func (s *HdrMethod) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfSignificantValueDigits", err) } s.NumberOfSignificantValueDigits = &value case float64: diff --git a/typedapi/types/hdrpercentileranksaggregate.go b/typedapi/types/hdrpercentileranksaggregate.go index 6f20cc3e5d..2306167ae4 100644 --- a/typedapi/types/hdrpercentileranksaggregate.go +++ b/typedapi/types/hdrpercentileranksaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HdrPercentileRanksAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L169-L170 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L169-L170 type HdrPercentileRanksAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` @@ -52,7 +53,7 @@ func (s *HdrPercentileRanksAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "values": @@ -65,13 +66,13 @@ func (s *HdrPercentileRanksAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(KeyedPercentiles, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o case '[': o := []ArrayPercentilesItem{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o } diff --git a/typedapi/types/hdrpercentilesaggregate.go b/typedapi/types/hdrpercentilesaggregate.go index 4b90e77a9e..e3e39281a7 100644 --- a/typedapi/types/hdrpercentilesaggregate.go +++ b/typedapi/types/hdrpercentilesaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HdrPercentilesAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L166-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L166-L167 type HdrPercentilesAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` @@ -52,7 +53,7 @@ func (s *HdrPercentilesAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "values": @@ -65,13 +66,13 @@ func (s *HdrPercentilesAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(KeyedPercentiles, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o case '[': o := []ArrayPercentilesItem{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o } diff --git a/typedapi/types/healthrecord.go b/typedapi/types/healthrecord.go index 7716bd675e..6239e9909a 100644 --- a/typedapi/types/healthrecord.go +++ b/typedapi/types/healthrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HealthRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/health/types.ts#L23-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/health/types.ts#L23-L94 type HealthRecord struct { // ActiveShardsPercent active number of shards in percent ActiveShardsPercent *string `json:"active_shards_percent,omitempty"` @@ -80,7 +81,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "active_shards_percent", "asp", "activeShardsPercent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ActiveShardsPercent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "cluster", "cl": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Cluster", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,13 +104,13 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "epoch", "time": if err := dec.Decode(&s.Epoch); err != nil { - return err + return fmt.Errorf("%s | %w", "Epoch", err) } case "init", "i", "shards.initializing", "shardsInitializing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Init", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,7 +122,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "max_task_wait_time", "mtwt", "maxTaskWaitTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTaskWaitTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -133,7 +134,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "node.data", "nd", "nodeData": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeData", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "node.total", "nt", "nodeTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -157,7 +158,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "pending_tasks", "pt", "pendingTasks": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PendingTasks", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -169,7 +170,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "pri", "p", "shards.primary", "shardsPrimary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pri", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -181,7 +182,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "relo", "r", "shards.relocating", "shardsRelocating": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Relo", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -193,7 +194,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "shards", "t", "sh", "shards.total", "shardsTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", 
err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -205,7 +206,7 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "status", "st": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -216,13 +217,13 @@ func (s *HealthRecord) UnmarshalJSON(data []byte) error { case "timestamp", "ts", "hms", "hhmmss": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "unassign", "u", "shards.unassigned", "shardsUnassigned": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Unassign", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/healthresponsebody.go b/typedapi/types/healthresponsebody.go index 9fb51c79e4..61f18ff3f5 100644 --- a/typedapi/types/healthresponsebody.go +++ b/typedapi/types/healthresponsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HealthResponseBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/health/ClusterHealthResponse.ts#L39-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/health/ClusterHealthResponse.ts#L39-L72 type HealthResponseBody struct { // ActivePrimaryShards The number of active primary shards. 
ActivePrimaryShards int `json:"active_primary_shards"` @@ -94,7 +95,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ActivePrimaryShards", err) } s.ActivePrimaryShards = value case float64: @@ -110,7 +111,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ActiveShards", err) } s.ActiveShards = value case float64: @@ -120,12 +121,12 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "active_shards_percent_as_number": if err := dec.Decode(&s.ActiveShardsPercentAsNumber); err != nil { - return err + return fmt.Errorf("%s | %w", "ActiveShardsPercentAsNumber", err) } case "cluster_name": if err := dec.Decode(&s.ClusterName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClusterName", err) } case "delayed_unassigned_shards": @@ -136,7 +137,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DelayedUnassignedShards", err) } s.DelayedUnassignedShards = value case float64: @@ -149,7 +150,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { s.Indices = make(map[string]IndexHealthStats, 0) } if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "initializing_shards": @@ -160,7 +161,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitializingShards", err) } s.InitializingShards = value case float64: @@ -176,7 +177,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfDataNodes", err) } s.NumberOfDataNodes = value case float64: @@ -192,7 +193,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfInFlightFetch", err) } s.NumberOfInFlightFetch = value case float64: @@ -208,7 +209,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfNodes", err) } s.NumberOfNodes = value case float64: @@ -224,7 +225,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfPendingTasks", err) } s.NumberOfPendingTasks = value case float64: @@ -240,7 +241,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelocatingShards", err) } s.RelocatingShards = value case float64: @@ -250,17 +251,17 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "task_max_waiting_in_queue": if err := dec.Decode(&s.TaskMaxWaitingInQueue); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskMaxWaitingInQueue", err) } case "task_max_waiting_in_queue_millis": if err := 
dec.Decode(&s.TaskMaxWaitingInQueueMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskMaxWaitingInQueueMillis", err) } case "timed_out": @@ -270,7 +271,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -285,7 +286,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedShards", err) } s.UnassignedShards = value case float64: diff --git a/typedapi/types/healthstatistics.go b/typedapi/types/healthstatistics.go index 944af4c31a..a9a10d0a79 100644 --- a/typedapi/types/healthstatistics.go +++ b/typedapi/types/healthstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HealthStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L153-L155 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L153-L155 type HealthStatistics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *HealthStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -73,7 +74,7 @@ func (s *HealthStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -82,7 +83,7 @@ func (s *HealthStatistics) UnmarshalJSON(data []byte) error { case "invocations": if err := dec.Decode(&s.Invocations); err != nil { - return err + return fmt.Errorf("%s | %w", "Invocations", err) } } diff --git a/typedapi/types/helprecord.go b/typedapi/types/helprecord.go index 1734435409..cc7e2d910b 100644 --- a/typedapi/types/helprecord.go +++ b/typedapi/types/helprecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HelpRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/help/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/help/types.ts#L20-L22 type HelpRecord struct { Endpoint string `json:"endpoint"` } @@ -53,7 +54,7 @@ func (s *HelpRecord) UnmarshalJSON(data []byte) error { case "endpoint": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Endpoint", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/highlight.go b/typedapi/types/highlight.go index cc55a9e9c7..8e174c3662 100644 --- a/typedapi/types/highlight.go +++ b/typedapi/types/highlight.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -37,7 +38,7 @@ import ( // Highlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L153-L156 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L153-L156 type Highlight struct { // BoundaryChars A string that contains each boundary character. BoundaryChars *string `json:"boundary_chars,omitempty"` @@ -133,7 +134,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "boundary_chars": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryChars", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -150,7 +151,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryMaxScan", err) } s.BoundaryMaxScan = &value case float64: @@ -160,13 +161,13 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "boundary_scanner": if err := dec.Decode(&s.BoundaryScanner); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryScanner", err) } case "boundary_scanner_locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryScannerLocale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -177,7 +178,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "encoder": if err := dec.Decode(&s.Encoder); err != nil { - return err + return fmt.Errorf("%s | %w", "Encoder", err) } case "fields": @@ -185,7 +186,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]HighlightField, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "force_source": @@ -195,7 +196,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ForceSource", err) } s.ForceSource = &value case bool: @@ -210,7 +211,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - 
return err + return fmt.Errorf("%s | %w", "FragmentSize", err) } s.FragmentSize = &value case float64: @@ -220,7 +221,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "fragmenter": if err := dec.Decode(&s.Fragmenter); err != nil { - return err + return fmt.Errorf("%s | %w", "Fragmenter", err) } case "highlight_filter": @@ -230,7 +231,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HighlightFilter", err) } s.HighlightFilter = &value case bool: @@ -239,7 +240,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "highlight_query": if err := dec.Decode(&s.HighlightQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "HighlightQuery", err) } case "max_analyzed_offset": @@ -250,7 +251,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAnalyzedOffset", err) } s.MaxAnalyzedOffset = &value case float64: @@ -266,7 +267,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxFragmentLength", err) } s.MaxFragmentLength = &value case float64: @@ -282,7 +283,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NoMatchSize", err) } s.NoMatchSize = &value case float64: @@ -298,7 +299,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfFragments", err) } s.NumberOfFragments = &value case float64: @@ -311,12 +312,12 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { s.Options = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "phrase_limit": @@ -327,7 +328,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PhraseLimit", err) } s.PhraseLimit = &value case float64: @@ -337,12 +338,12 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "post_tags": if err := dec.Decode(&s.PostTags); err != nil { - return err + return fmt.Errorf("%s | %w", "PostTags", err) } case "pre_tags": if err := dec.Decode(&s.PreTags); err != nil { - return err + return fmt.Errorf("%s | %w", "PreTags", err) } case "require_field_match": @@ -352,7 +353,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequireFieldMatch", err) } s.RequireFieldMatch = &value case bool: @@ -361,12 +362,12 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "tags_schema": if err := dec.Decode(&s.TagsSchema); err != nil { - return err + return fmt.Errorf("%s | %w", "TagsSchema", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/highlightfield.go b/typedapi/types/highlightfield.go index b9a29dcb32..ba8a7bae5b 100644 --- a/typedapi/types/highlightfield.go +++ 
b/typedapi/types/highlightfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -36,7 +37,7 @@ import ( // HighlightField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/highlighting.ts#L193-L197 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/highlighting.ts#L193-L197 type HighlightField struct { Analyzer Analyzer `json:"analyzer,omitempty"` // BoundaryChars A string that contains each boundary character. @@ -237,7 +238,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "boundary_chars": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryChars", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -254,7 +255,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryMaxScan", err) } s.BoundaryMaxScan = &value case float64: @@ -264,13 +265,13 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "boundary_scanner": if err := dec.Decode(&s.BoundaryScanner); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryScanner", err) } case "boundary_scanner_locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundaryScannerLocale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -286,7 +287,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ForceSource", err) } s.ForceSource = &value case bool: @@ -301,7 +302,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FragmentOffset", err) } s.FragmentOffset = &value case float64: @@ -317,7 +318,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FragmentSize", err) } s.FragmentSize = &value case float64: @@ -327,7 +328,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "fragmenter": if err := dec.Decode(&s.Fragmenter); err != nil { - return err + return fmt.Errorf("%s | %w", "Fragmenter", err) } case "highlight_filter": @@ -337,7 +338,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HighlightFilter", err) } s.HighlightFilter = &value case bool: @@ -346,7 +347,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "highlight_query": if err := dec.Decode(&s.HighlightQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "HighlightQuery", err) } case "matched_fields": @@ -355,13 +356,13 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, 
[]byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchedFields", err) } s.MatchedFields = append(s.MatchedFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MatchedFields); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchedFields", err) } } @@ -373,7 +374,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAnalyzedOffset", err) } s.MaxAnalyzedOffset = &value case float64: @@ -389,7 +390,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxFragmentLength", err) } s.MaxFragmentLength = &value case float64: @@ -405,7 +406,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NoMatchSize", err) } s.NoMatchSize = &value case float64: @@ -421,7 +422,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfFragments", err) } s.NumberOfFragments = &value case float64: @@ -434,12 +435,12 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { s.Options = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "phrase_limit": @@ -450,7 +451,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PhraseLimit", err) } s.PhraseLimit = &value case float64: @@ -460,12 +461,12 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "post_tags": if err := dec.Decode(&s.PostTags); err != nil { - return err + return fmt.Errorf("%s | %w", "PostTags", err) } case "pre_tags": if err := dec.Decode(&s.PreTags); err != nil { - return err + return fmt.Errorf("%s | %w", "PreTags", err) } case "require_field_match": @@ -475,7 +476,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequireFieldMatch", err) } s.RequireFieldMatch = &value case bool: @@ -484,12 +485,12 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "tags_schema": if err := dec.Decode(&s.TagsSchema); err != nil { - return err + return fmt.Errorf("%s | %w", "TagsSchema", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/hint.go b/typedapi/types/hint.go index 119ec4d3c8..8b06ee1604 100644 --- a/typedapi/types/hint.go +++ b/typedapi/types/hint.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Hint type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/suggest_user_profiles/types.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/suggest_user_profiles/types.ts#L23-L34 type Hint struct { // Labels A single key-value pair to match against the labels section // of a profile. A profile is considered matching if it matches diff --git a/typedapi/types/histogramaggregate.go b/typedapi/types/histogramaggregate.go index c0ee76d5ea..ec29d6c970 100644 --- a/typedapi/types/histogramaggregate.go +++ b/typedapi/types/histogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L340-L341 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L340-L341 type HistogramAggregate struct { Buckets BucketsHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *HistogramAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]HistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []HistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/histogramaggregation.go b/typedapi/types/histogramaggregation.go index 78974e7f0a..5174c99d54 100644 --- a/typedapi/types/histogramaggregation.go +++ b/typedapi/types/histogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L500-L546 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L500-L546 type HistogramAggregation struct { // ExtendedBounds Enables extending the bounds of the histogram beyond the data itself. 
ExtendedBounds *ExtendedBoundsdouble `json:"extended_bounds,omitempty"` @@ -84,18 +85,18 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case "extended_bounds": if err := dec.Decode(&s.ExtendedBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "ExtendedBounds", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -106,7 +107,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case "hard_bounds": if err := dec.Decode(&s.HardBounds); err != nil { - return err + return fmt.Errorf("%s | %w", "HardBounds", err) } case "interval": @@ -116,7 +117,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } f := Float64(value) s.Interval = &f @@ -132,7 +133,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -141,7 +142,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -152,7 +153,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -167,7 +168,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } f := Float64(value) s.Missing = &f @@ -179,7 +180,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -195,7 +196,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } f := Float64(value) s.Offset = &f @@ -214,13 +215,13 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o case '[': o := make([]map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o } @@ -228,7 +229,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -237,7 +238,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -246,7 +247,7 @@ func (s 
*HistogramAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -254,7 +255,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/histogrambucket.go b/typedapi/types/histogrambucket.go index d74f743034..5142e2621a 100644 --- a/typedapi/types/histogrambucket.go +++ b/typedapi/types/histogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // HistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L343-L346 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L343-L346 type HistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -62,7 +62,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -77,7 +77,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } f := Float64(value) s.Key = f @@ -89,7 +89,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,490 +112,490 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) 
} s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o 
:= NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := 
NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -605,7 +605,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/histogramgrouping.go b/typedapi/types/histogramgrouping.go index 79a36b6195..2e7c83ad3d 100644 --- a/typedapi/types/histogramgrouping.go +++ b/typedapi/types/histogramgrouping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HistogramGrouping type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/_types/Groupings.ts#L84-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/_types/Groupings.ts#L84-L97 type HistogramGrouping struct { // Fields The set of fields that you wish to build histograms for. // All fields specified must be some kind of numeric. 
@@ -65,13 +66,13 @@ func (s *HistogramGrouping) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } @@ -82,7 +83,7 @@ func (s *HistogramGrouping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } s.Interval = value case float64: diff --git a/typedapi/types/histogramproperty.go b/typedapi/types/histogramproperty.go index 6006759ae6..86bef09384 100644 --- a/typedapi/types/histogramproperty.go +++ b/typedapi/types/histogramproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HistogramProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L54-L57 type HistogramProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -61,7 +62,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -379,7 +380,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -394,7 +395,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -406,7 +407,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -718,7 +719,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/hit.go b/typedapi/types/hit.go index 660953994a..a237452581 100644 --- a/typedapi/types/hit.go +++ b/typedapi/types/hit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Hit type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L40-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L40-L64 type Hit struct { Explanation_ *Explanation `json:"_explanation,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` @@ -70,7 +71,7 @@ func (s *Hit) UnmarshalJSON(data []byte) error { case "_explanation": if err := dec.Decode(&s.Explanation_); err != nil { - return err + return fmt.Errorf("%s | %w", "Explanation_", err) } case "fields": @@ -78,7 +79,7 @@ func (s *Hit) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "highlight": @@ -86,12 +87,12 @@ func (s *Hit) UnmarshalJSON(data []byte) error { s.Highlight = make(map[string][]string, 0) } if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "ignored_field_values": @@ -99,17 +100,17 @@ func (s *Hit) UnmarshalJSON(data []byte) error { s.IgnoredFieldValues = make(map[string][]string, 0) } if err := dec.Decode(&s.IgnoredFieldValues); err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoredFieldValues", err) } case "_ignored": if err := dec.Decode(&s.Ignored_); err != nil { - return err + return fmt.Errorf("%s | %w", "Ignored_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "inner_hits": @@ -117,23 +118,23 @@ func (s *Hit) UnmarshalJSON(data []byte) error { s.InnerHits = make(map[string]InnerHitsResult, 0) } if err := dec.Decode(&s.InnerHits); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } case "matched_queries": if err := dec.Decode(&s.MatchedQueries); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchedQueries", err) } case "_nested": if err := dec.Decode(&s.Nested_); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested_", err) } case "_node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -149,7 +150,7 @@ func (s *Hit) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = &value case float64: @@ -160,7 +161,7 @@ func (s *Hit) UnmarshalJSON(data []byte) error { case "_routing": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -171,18 +172,18 @@ func (s *Hit) UnmarshalJSON(data []byte) error { case "_score": if err := dec.Decode(&s.Score_); err != nil { - return err + return fmt.Errorf("%s | %w", "Score_", err) } case "_seq_no": if err := 
dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } case "_shard": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Shard_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -193,17 +194,17 @@ func (s *Hit) UnmarshalJSON(data []byte) error { case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "_version": if err := dec.Decode(&s.Version_); err != nil { - return err + return fmt.Errorf("%s | %w", "Version_", err) } } diff --git a/typedapi/types/hitsevent.go b/typedapi/types/hitsevent.go index 4443d6be75..b84ccf269c 100644 --- a/typedapi/types/hitsevent.go +++ b/typedapi/types/hitsevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HitsEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/_types/EqlHits.ts#L41-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/_types/EqlHits.ts#L41-L49 type HitsEvent struct { Fields map[string][]json.RawMessage `json:"fields,omitempty"` // Id_ Unique identifier for the event. This ID is only unique within the index. @@ -60,22 +61,22 @@ func (s *HitsEvent) UnmarshalJSON(data []byte) error { s.Fields = make(map[string][]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } } diff --git a/typedapi/types/hitsmetadata.go b/typedapi/types/hitsmetadata.go index 93565ba9cb..2beb2f7a44 100644 --- a/typedapi/types/hitsmetadata.go +++ b/typedapi/types/hitsmetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HitsMetadata type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L66-L72 type HitsMetadata struct { Hits []Hit `json:"hits"` MaxScore Float64 `json:"max_score,omitempty"` @@ -55,17 +56,17 @@ func (s *HitsMetadata) UnmarshalJSON(data []byte) error { case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": if err := dec.Decode(&s.MaxScore); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } } diff --git a/typedapi/types/hitssequence.go b/typedapi/types/hitssequence.go index 24a5a6a966..7c64f3a515 100644 --- a/typedapi/types/hitssequence.go +++ b/typedapi/types/hitssequence.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // HitsSequence type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/eql/_types/EqlHits.ts#L51-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/eql/_types/EqlHits.ts#L51-L59 type HitsSequence struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events"` diff --git a/typedapi/types/holtlinearmodelsettings.go b/typedapi/types/holtlinearmodelsettings.go index 5998b0748e..41584e7132 100644 --- a/typedapi/types/holtlinearmodelsettings.go +++ b/typedapi/types/holtlinearmodelsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HoltLinearModelSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L271-L274 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L271-L274 type HoltLinearModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` @@ -58,7 +59,7 @@ func (s *HoltLinearModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := float32(value) s.Alpha = &f @@ -74,7 +75,7 @@ func (s *HoltLinearModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Beta", err) } f := float32(value) s.Beta = &f diff --git a/typedapi/types/holtmovingaverageaggregation.go b/typedapi/types/holtmovingaverageaggregation.go index 95af03ab25..03d3b362bf 100644 --- a/typedapi/types/holtmovingaverageaggregation.go +++ b/typedapi/types/holtmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HoltMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L257-L260 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L257-L260 type HoltMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -68,13 +69,13 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,12 +86,12 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "minimize": @@ -100,7 +101,7 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Minimize", err) } s.Minimize = &value case bool: @@ -109,13 +110,13 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "model": if err := dec.Decode(&s.Model); err != nil { - return err + return fmt.Errorf("%s | %w", "Model", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,7 +133,7 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Predict", err) } s.Predict = &value case float64: @@ -142,7 +143,7 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "window": @@ -153,7 +154,7 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/holtwintersmodelsettings.go b/typedapi/types/holtwintersmodelsettings.go index 4da52a5015..ebc4868866 100644 --- a/typedapi/types/holtwintersmodelsettings.go +++ b/typedapi/types/holtwintersmodelsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HoltWintersModelSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L275-L282 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L275-L282 type HoltWintersModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` @@ -64,7 +65,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := float32(value) s.Alpha = &f @@ -80,7 +81,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Beta", err) } f := float32(value) s.Beta = &f @@ -96,7 +97,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gamma", err) } f := float32(value) s.Gamma = &f @@ -112,7 +113,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Pad", err) } s.Pad = &value case bool: @@ -127,7 +128,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Period", err) } s.Period = &value case float64: @@ -137,7 +138,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/holtwintersmovingaverageaggregation.go b/typedapi/types/holtwintersmovingaverageaggregation.go index fac8c37bab..1a4482f256 100644 --- a/typedapi/types/holtwintersmovingaverageaggregation.go +++ b/typedapi/types/holtwintersmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // HoltWintersMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L262-L265 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L262-L265 type HoltWintersMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -68,13 +69,13 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,12 +86,12 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "minimize": @@ -100,7 +101,7 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Minimize", err) } s.Minimize = &value case bool: @@ -109,13 +110,13 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "model": if err := dec.Decode(&s.Model); err != nil { - return err + return fmt.Errorf("%s | %w", "Model", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,7 +133,7 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Predict", err) } s.Predict = &value case float64: @@ -142,7 +143,7 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "window": @@ -153,7 +154,7 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/hop.go b/typedapi/types/hop.go index 81658ec383..981004e28f 100644 --- a/typedapi/types/hop.go +++ b/typedapi/types/hop.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Hop type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/Hop.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/Hop.ts#L23-L36 type Hop struct { // Connections Specifies one or more fields from which you want to extract terms that are // associated with the specified vertices. diff --git a/typedapi/types/hotthread.go b/typedapi/types/hotthread.go index 64a6ec5b35..7e243dca06 100644 --- a/typedapi/types/hotthread.go +++ b/typedapi/types/hotthread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HotThread type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/hot_threads/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/hot_threads/types.ts#L23-L28 type HotThread struct { Hosts []string `json:"hosts"` NodeId string `json:"node_id"` @@ -54,22 +55,22 @@ func (s *HotThread) UnmarshalJSON(data []byte) error { case "hosts": if err := dec.Decode(&s.Hosts); err != nil { - return err + return fmt.Errorf("%s | %w", "Hosts", err) } case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "node_name": if err := dec.Decode(&s.NodeName); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } case "threads": if err := dec.Decode(&s.Threads); err != nil { - return err + return fmt.Errorf("%s | %w", "Threads", err) } } diff --git a/typedapi/types/hourandminute.go b/typedapi/types/hourandminute.go index 1e10c7170b..af1ee857c3 100644 --- a/typedapi/types/hourandminute.go +++ b/typedapi/types/hourandminute.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // HourAndMinute type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L105-L108 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L105-L108 type HourAndMinute struct { Hour []int `json:"hour"` Minute []int `json:"minute"` diff --git a/typedapi/types/hourlyschedule.go b/typedapi/types/hourlyschedule.go index 2ec440a85a..8e0bb0e0f3 100644 --- a/typedapi/types/hourlyschedule.go +++ b/typedapi/types/hourlyschedule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // HourlySchedule type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L47-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L47-L49 type HourlySchedule struct { Minute []int `json:"minute"` } diff --git a/typedapi/types/htmlstripcharfilter.go b/typedapi/types/htmlstripcharfilter.go index 79274e1ded..9a14470d5b 100644 --- a/typedapi/types/htmlstripcharfilter.go +++ b/typedapi/types/htmlstripcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HtmlStripCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/char_filters.ts#L43-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/char_filters.ts#L43-L45 type HtmlStripCharFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *HtmlStripCharFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/http.go b/typedapi/types/http.go index d7795c4025..870f6da678 100644 --- a/typedapi/types/http.go +++ b/typedapi/types/http.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Http type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L633-L647 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L633-L647 type Http struct { // Clients Information on current and recently-closed HTTP client connections. // Clients that have been closed longer than the @@ -60,7 +61,7 @@ func (s *Http) UnmarshalJSON(data []byte) error { case "clients": if err := dec.Decode(&s.Clients); err != nil { - return err + return fmt.Errorf("%s | %w", "Clients", err) } case "current_open": @@ -71,7 +72,7 @@ func (s *Http) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentOpen", err) } s.CurrentOpen = &value case float64: @@ -86,7 +87,7 @@ func (s *Http) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalOpened", err) } s.TotalOpened = &value case float64: diff --git a/typedapi/types/httpemailattachment.go b/typedapi/types/httpemailattachment.go index 0bc7804f5d..8fd121d568 100644 --- a/typedapi/types/httpemailattachment.go +++ b/typedapi/types/httpemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HttpEmailAttachment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L218-L222 type HttpEmailAttachment struct { ContentType *string `json:"content_type,omitempty"` Inline *bool `json:"inline,omitempty"` @@ -55,7 +56,7 @@ func (s *HttpEmailAttachment) UnmarshalJSON(data []byte) error { case "content_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ContentType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *HttpEmailAttachment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Inline", err) } s.Inline = &value case bool: @@ -80,7 +81,7 @@ func (s *HttpEmailAttachment) UnmarshalJSON(data []byte) error { case "request": if err := dec.Decode(&s.Request); err != nil { - return err + return fmt.Errorf("%s | %w", "Request", err) } } diff --git a/typedapi/types/httpheaders.go b/typedapi/types/httpheaders.go index bc203263a1..7617b6c8c8 100644 --- a/typedapi/types/httpheaders.go +++ b/typedapi/types/httpheaders.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // HttpHeaders type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L158-L158 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L158-L158 type HttpHeaders map[string][]string diff --git a/typedapi/types/httpinput.go b/typedapi/types/httpinput.go index 9a65d77787..eb67ccc117 100644 --- a/typedapi/types/httpinput.go +++ b/typedapi/types/httpinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // HttpInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L44-L48 type HttpInput struct { Extract []string `json:"extract,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` diff --git a/typedapi/types/httpinputauthentication.go b/typedapi/types/httpinputauthentication.go index 96482abdb1..93ea419922 100644 --- a/typedapi/types/httpinputauthentication.go +++ b/typedapi/types/httpinputauthentication.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // HttpInputAuthentication type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L50-L52 type HttpInputAuthentication struct { Basic HttpInputBasicAuthentication `json:"basic"` } diff --git a/typedapi/types/httpinputbasicauthentication.go b/typedapi/types/httpinputbasicauthentication.go index 853886ec8b..adeae0444e 100644 --- a/typedapi/types/httpinputbasicauthentication.go +++ b/typedapi/types/httpinputbasicauthentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HttpInputBasicAuthentication type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L54-L57 type HttpInputBasicAuthentication struct { Password string `json:"password"` Username string `json:"username"` @@ -52,12 +53,12 @@ func (s *HttpInputBasicAuthentication) UnmarshalJSON(data []byte) error { case "password": if err := dec.Decode(&s.Password); err != nil { - return err + return fmt.Errorf("%s | %w", "Password", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/httpinputproxy.go b/typedapi/types/httpinputproxy.go index b7d488efb6..08613dfd08 100644 --- a/typedapi/types/httpinputproxy.go +++ b/typedapi/types/httpinputproxy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // HttpInputProxy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L67-L70 type HttpInputProxy struct { Host string `json:"host"` Port uint `json:"port"` @@ -52,12 +53,12 @@ func (s *HttpInputProxy) UnmarshalJSON(data []byte) error { case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "port": if err := dec.Decode(&s.Port); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } } diff --git a/typedapi/types/httpinputrequestdefinition.go b/typedapi/types/httpinputrequestdefinition.go index e12822eae1..c92ddee15e 100644 --- a/typedapi/types/httpinputrequestdefinition.go +++ b/typedapi/types/httpinputrequestdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // HttpInputRequestDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L72-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L72-L86 type HttpInputRequestDefinition struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` @@ -67,13 +68,13 @@ func (s *HttpInputRequestDefinition) UnmarshalJSON(data []byte) error { case "auth": if err := dec.Decode(&s.Auth); err != nil { - return err + return fmt.Errorf("%s | %w", "Auth", err) } case "body": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *HttpInputRequestDefinition) UnmarshalJSON(data []byte) error { case "connection_timeout": if err := dec.Decode(&s.ConnectionTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ConnectionTimeout", err) } case "headers": @@ -92,17 +93,17 @@ func (s *HttpInputRequestDefinition) UnmarshalJSON(data []byte) error { s.Headers = make(map[string]string, 0) } if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "method": if err := dec.Decode(&s.Method); err != nil { - return err + return fmt.Errorf("%s | %w", "Method", err) } case "params": @@ -110,13 +111,13 @@ func (s *HttpInputRequestDefinition) UnmarshalJSON(data []byte) error { s.Params = make(map[string]string, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,28 +128,28 @@ func (s *HttpInputRequestDefinition) UnmarshalJSON(data []byte) error { case "port": if err := 
dec.Decode(&s.Port); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } case "proxy": if err := dec.Decode(&s.Proxy); err != nil { - return err + return fmt.Errorf("%s | %w", "Proxy", err) } case "read_timeout": if err := dec.Decode(&s.ReadTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadTimeout", err) } case "scheme": if err := dec.Decode(&s.Scheme); err != nil { - return err + return fmt.Errorf("%s | %w", "Scheme", err) } case "url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Url", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/httpinputrequestresult.go b/typedapi/types/httpinputrequestresult.go index 1f17a652b3..f3368aab22 100644 --- a/typedapi/types/httpinputrequestresult.go +++ b/typedapi/types/httpinputrequestresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // HttpInputRequestResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L300-L300 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L300-L300 type HttpInputRequestResult struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` @@ -67,13 +68,13 @@ func (s *HttpInputRequestResult) UnmarshalJSON(data []byte) error { case "auth": if err := dec.Decode(&s.Auth); err != nil { - return err + return fmt.Errorf("%s | %w", "Auth", err) } case "body": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *HttpInputRequestResult) UnmarshalJSON(data []byte) error { case "connection_timeout": if err := dec.Decode(&s.ConnectionTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ConnectionTimeout", err) } case "headers": @@ -92,17 +93,17 @@ func (s *HttpInputRequestResult) UnmarshalJSON(data []byte) error { s.Headers = make(map[string]string, 0) } if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "method": if err := dec.Decode(&s.Method); err != nil { - return err + return fmt.Errorf("%s | %w", "Method", err) } case "params": @@ -110,13 +111,13 @@ func (s *HttpInputRequestResult) UnmarshalJSON(data []byte) error { s.Params = make(map[string]string, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,28 +128,28 @@ func (s *HttpInputRequestResult) UnmarshalJSON(data []byte) error { case "port": if err := dec.Decode(&s.Port); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Port", err) } case "proxy": if err := dec.Decode(&s.Proxy); err != nil { - return err + return fmt.Errorf("%s | %w", "Proxy", err) } case "read_timeout": if err := dec.Decode(&s.ReadTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadTimeout", err) } case "scheme": if err := dec.Decode(&s.Scheme); err != nil { - return err + return fmt.Errorf("%s | %w", "Scheme", err) } case "url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Url", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/httpinputresponseresult.go b/typedapi/types/httpinputresponseresult.go index e6e1074f8a..79f6c40b3c 100644 --- a/typedapi/types/httpinputresponseresult.go +++ b/typedapi/types/httpinputresponseresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HttpInputResponseResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L302-L306 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L302-L306 type HttpInputResponseResult struct { Body string `json:"body"` Headers HttpHeaders `json:"headers"` @@ -55,7 +56,7 @@ func (s *HttpInputResponseResult) UnmarshalJSON(data []byte) error { case "body": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *HttpInputResponseResult) UnmarshalJSON(data []byte) error { case "headers": if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "status": @@ -77,7 +78,7 @@ func (s *HttpInputResponseResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } s.Status = value case float64: diff --git a/typedapi/types/hunspelltokenfilter.go b/typedapi/types/hunspelltokenfilter.go index 627ada9313..c106aed2e2 100644 --- a/typedapi/types/hunspelltokenfilter.go +++ b/typedapi/types/hunspelltokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HunspellTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L200-L206 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L200-L206 type HunspellTokenFilter struct { Dedup *bool `json:"dedup,omitempty"` Dictionary *string `json:"dictionary,omitempty"` @@ -62,7 +63,7 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Dedup", err) } s.Dedup = &value case bool: @@ -72,7 +73,7 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { case "dictionary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Dictionary", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { case "locale": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Locale", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -100,7 +101,7 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LongestOnly", err) } s.LongestOnly = &value case bool: @@ -109,12 +110,12 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/hyperparameter.go b/typedapi/types/hyperparameter.go index ac62f00606..5481cede8f 100644 --- a/typedapi/types/hyperparameter.go +++ b/typedapi/types/hyperparameter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Hyperparameter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L217-L231 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L217-L231 type Hyperparameter struct { // AbsoluteImportance A positive number showing how much the parameter influences the variation of // the loss function. 
For hyperparameters with values that are not specified by @@ -72,7 +73,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AbsoluteImportance", err) } f := Float64(value) s.AbsoluteImportance = &f @@ -83,7 +84,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "relative_importance": @@ -93,7 +94,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelativeImportance", err) } f := Float64(value) s.RelativeImportance = &f @@ -109,7 +110,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Supplied", err) } s.Supplied = value case bool: @@ -123,7 +124,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := Float64(value) s.Value = f diff --git a/typedapi/types/hyperparameters.go b/typedapi/types/hyperparameters.go index 1a79d1c3e3..280d66b1f5 100644 --- a/typedapi/types/hyperparameters.go +++ b/typedapi/types/hyperparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Hyperparameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L419-L525 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L419-L525 type Hyperparameters struct { // Alpha Advanced configuration option. 
// Machine learning uses loss guided tree growing, which means that the decision @@ -153,7 +154,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := Float64(value) s.Alpha = &f @@ -169,7 +170,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DownsampleFactor", err) } f := Float64(value) s.DownsampleFactor = &f @@ -185,7 +186,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Eta", err) } f := Float64(value) s.Eta = &f @@ -201,7 +202,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EtaGrowthRatePerTree", err) } f := Float64(value) s.EtaGrowthRatePerTree = &f @@ -217,7 +218,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureBagFraction", err) } f := Float64(value) s.FeatureBagFraction = &f @@ -233,7 +234,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gamma", err) } f := Float64(value) s.Gamma = &f @@ -249,7 +250,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lambda", err) } f := Float64(value) s.Lambda = &f @@ -266,7 +267,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAttemptsToAddTree", err) } s.MaxAttemptsToAddTree = &value case float64: @@ -282,7 +283,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxOptimizationRoundsPerHyperparameter", err) } s.MaxOptimizationRoundsPerHyperparameter = &value case float64: @@ -298,7 +299,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTrees", err) } s.MaxTrees = &value case float64: @@ -314,7 +315,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumFolds", err) } s.NumFolds = &value case float64: @@ -330,7 +331,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumSplitsPerFeature", err) } s.NumSplitsPerFeature = &value case float64: @@ -346,7 +347,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthLimit", err) } s.SoftTreeDepthLimit = &value case float64: @@ -361,7 +362,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil 
{ - return err + return fmt.Errorf("%s | %w", "SoftTreeDepthTolerance", err) } f := Float64(value) s.SoftTreeDepthTolerance = &f diff --git a/typedapi/types/hyphenationdecompoundertokenfilter.go b/typedapi/types/hyphenationdecompoundertokenfilter.go index c61de9b703..abdbe2aed5 100644 --- a/typedapi/types/hyphenationdecompoundertokenfilter.go +++ b/typedapi/types/hyphenationdecompoundertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // HyphenationDecompounderTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L58-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L58-L60 type HyphenationDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` @@ -61,7 +62,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "hyphenation_patterns_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HyphenationPatternsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSubwordSize", err) } s.MaxSubwordSize = &value case float64: @@ -94,7 +95,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinSubwordSize", err) } s.MinSubwordSize = &value case float64: @@ -110,7 +111,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinWordSize", err) } s.MinWordSize = &value case float64: @@ -125,7 +126,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OnlyLongestMatch", err) } s.OnlyLongestMatch = &value case bool: @@ -134,23 +135,23 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "word_list": if err := dec.Decode(&s.WordList); err != nil { - return err + return fmt.Errorf("%s | %w", "WordList", err) } case "word_list_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WordListPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/icuanalyzer.go b/typedapi/types/icuanalyzer.go index f004a6b6a1..166e670153 100644 --- 
a/typedapi/types/icuanalyzer.go +++ b/typedapi/types/icuanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,7 +29,7 @@ import ( // IcuAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L67-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L67-L71 type IcuAnalyzer struct { Method icunormalizationtype.IcuNormalizationType `json:"method"` Mode icunormalizationmode.IcuNormalizationMode `json:"mode"` diff --git a/typedapi/types/icucollationtokenfilter.go b/typedapi/types/icucollationtokenfilter.go index f7949d643c..566461ee2e 100644 --- a/typedapi/types/icucollationtokenfilter.go +++ b/typedapi/types/icucollationtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -35,7 +36,7 @@ import ( // IcuCollationTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L51-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L51-L65 type IcuCollationTokenFilter struct { Alternate *icucollationalternate.IcuCollationAlternate `json:"alternate,omitempty"` CaseFirst *icucollationcasefirst.IcuCollationCaseFirst `json:"caseFirst,omitempty"` @@ -70,12 +71,12 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "alternate": if err := dec.Decode(&s.Alternate); err != nil { - return err + return fmt.Errorf("%s | %w", "Alternate", err) } case "caseFirst": if err := dec.Decode(&s.CaseFirst); err != nil { - return err + return fmt.Errorf("%s | %w", "CaseFirst", err) } case "caseLevel": @@ -85,7 +86,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseLevel", err) } s.CaseLevel = &value case bool: @@ -95,7 +96,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "country": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Country", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -106,7 +107,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "decomposition": if err := dec.Decode(&s.Decomposition); err != nil { - return err + return fmt.Errorf("%s | %w", "Decomposition", err) } case "hiraganaQuaternaryMode": @@ -116,7 +117,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HiraganaQuaternaryMode", err) } 
s.HiraganaQuaternaryMode = &value case bool: @@ -126,7 +127,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "language": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Language", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -142,7 +143,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Numeric", err) } s.Numeric = &value case bool: @@ -152,7 +153,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "rules": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Rules", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,18 +164,18 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "strength": if err := dec.Decode(&s.Strength); err != nil { - return err + return fmt.Errorf("%s | %w", "Strength", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "variableTop": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VariableTop", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -186,7 +187,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "variant": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Variant", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -197,7 +198,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/icufoldingtokenfilter.go b/typedapi/types/icufoldingtokenfilter.go index b128eb1bb8..909d4d0b79 100644 --- a/typedapi/types/icufoldingtokenfilter.go +++ b/typedapi/types/icufoldingtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IcuFoldingTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L46-L49 type IcuFoldingTokenFilter struct { Type string `json:"type,omitempty"` UnicodeSetFilter string `json:"unicode_set_filter"` @@ -54,13 +55,13 @@ func (s *IcuFoldingTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "unicode_set_filter": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UnicodeSetFilter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *IcuFoldingTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/icunormalizationcharfilter.go b/typedapi/types/icunormalizationcharfilter.go index 9e3aea6266..b27a67d190 100644 --- a/typedapi/types/icunormalizationcharfilter.go +++ b/typedapi/types/icunormalizationcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icunormalizationmode" @@ -32,7 +33,7 @@ import ( // IcuNormalizationCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L40-L44 type IcuNormalizationCharFilter struct { Mode *icunormalizationmode.IcuNormalizationMode `json:"mode,omitempty"` Name *icunormalizationtype.IcuNormalizationType `json:"name,omitempty"` @@ -57,22 +58,22 @@ func (s *IcuNormalizationCharFilter) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/icunormalizationtokenfilter.go b/typedapi/types/icunormalizationtokenfilter.go index 39285ff01e..a5d65082b1 100644 --- a/typedapi/types/icunormalizationtokenfilter.go +++ b/typedapi/types/icunormalizationtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icunormalizationtype" @@ -31,7 +32,7 @@ import ( // IcuNormalizationTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L35-L38 type IcuNormalizationTokenFilter struct { Name icunormalizationtype.IcuNormalizationType `json:"name"` Type string `json:"type,omitempty"` @@ -55,17 +56,17 @@ func (s *IcuNormalizationTokenFilter) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/icutokenizer.go b/typedapi/types/icutokenizer.go index d613ac4f00..b1b080b25e 100644 --- a/typedapi/types/icutokenizer.go +++ b/typedapi/types/icutokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IcuTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L30-L33 type IcuTokenizer struct { RuleFiles string `json:"rule_files"` Type string `json:"type,omitempty"` @@ -55,7 +56,7 @@ func (s *IcuTokenizer) UnmarshalJSON(data []byte) error { case "rule_files": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RuleFiles", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *IcuTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/icutransformtokenfilter.go b/typedapi/types/icutransformtokenfilter.go index 616011338e..f0bda1523e 100644 --- a/typedapi/types/icutransformtokenfilter.go +++ b/typedapi/types/icutransformtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IcuTransformTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/icu-plugin.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/icu-plugin.ts#L24-L28 type IcuTransformTokenFilter struct { Dir *icutransformdirection.IcuTransformDirection `json:"dir,omitempty"` Id string `json:"id"` @@ -57,13 +58,13 @@ func (s *IcuTransformTokenFilter) UnmarshalJSON(data []byte) error { case "dir": if err := dec.Decode(&s.Dir); err != nil { - return err + return fmt.Errorf("%s | %w", "Dir", err) } case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,12 +75,12 @@ func (s *IcuTransformTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/ids.go b/typedapi/types/ids.go index 53b4d4119e..1416ee3a5b 100644 --- a/typedapi/types/ids.go +++ b/typedapi/types/ids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Ids type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L62-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L62-L62 type Ids []string diff --git a/typedapi/types/idsquery.go b/typedapi/types/idsquery.go index e1fac04e88..b3d25257a3 100644 --- a/typedapi/types/idsquery.go +++ b/typedapi/types/idsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IdsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L80-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L80-L85 type IdsQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -65,7 +66,7 @@ func (s *IdsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -77,7 +78,7 @@ func (s *IdsQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,13 +93,13 @@ func (s *IdsQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = append(s.Values, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Values); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } } diff --git a/typedapi/types/ilm.go b/typedapi/types/ilm.go index 7cea1e276d..b36b86f3c3 100644 --- a/typedapi/types/ilm.go +++ b/typedapi/types/ilm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Ilm type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L162-L165 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L162-L165 type Ilm struct { PolicyCount int `json:"policy_count"` PolicyStats []IlmPolicyStatistics `json:"policy_stats"` @@ -59,7 +60,7 @@ func (s *Ilm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PolicyCount", err) } s.PolicyCount = value case float64: @@ -69,7 +70,7 @@ func (s *Ilm) UnmarshalJSON(data []byte) error { case "policy_stats": if err := dec.Decode(&s.PolicyStats); err != nil { - return err + return fmt.Errorf("%s | %w", "PolicyStats", err) } } diff --git a/typedapi/types/ilmindicator.go b/typedapi/types/ilmindicator.go index c6268dfcde..d25c948ab3 100644 --- a/typedapi/types/ilmindicator.go +++ b/typedapi/types/ilmindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IlmIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L145-L149 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L145-L149 type IlmIndicator struct { Details *IlmIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *IlmIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/ilmindicatordetails.go b/typedapi/types/ilmindicatordetails.go index d2d3e4d781..e2719180a7 100644 --- a/typedapi/types/ilmindicatordetails.go +++ b/typedapi/types/ilmindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IlmIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L150-L153 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L150-L153 type IlmIndicatorDetails struct { IlmStatus lifecycleoperationmode.LifecycleOperationMode `json:"ilm_status"` Policies int64 `json:"policies"` @@ -55,7 +56,7 @@ func (s *IlmIndicatorDetails) UnmarshalJSON(data []byte) error { case "ilm_status": if err := dec.Decode(&s.IlmStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "IlmStatus", err) } case "policies": @@ -65,7 +66,7 @@ func (s *IlmIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Policies", err) } s.Policies = value case float64: diff --git a/typedapi/types/ilmpolicy.go b/typedapi/types/ilmpolicy.go index 833d8aec56..58083476d5 100644 --- a/typedapi/types/ilmpolicy.go +++ b/typedapi/types/ilmpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IlmPolicy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/_types/Policy.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/_types/Policy.ts#L23-L26 type IlmPolicy struct { Meta_ Metadata `json:"_meta,omitempty"` Phases Phases `json:"phases"` @@ -52,12 +53,12 @@ func (s *IlmPolicy) UnmarshalJSON(data []byte) error { case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "phases": if err := dec.Decode(&s.Phases); err != nil { - return err + return fmt.Errorf("%s | %w", "Phases", err) } } diff --git a/typedapi/types/ilmpolicystatistics.go b/typedapi/types/ilmpolicystatistics.go index fb158cd459..2a2907ec1a 100644 --- a/typedapi/types/ilmpolicystatistics.go +++ b/typedapi/types/ilmpolicystatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IlmPolicyStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L157-L160 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L157-L160 type IlmPolicyStatistics struct { IndicesManaged int `json:"indices_managed"` Phases Phases `json:"phases"` @@ -59,7 +60,7 @@ func (s *IlmPolicyStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesManaged", err) } s.IndicesManaged = value case float64: @@ -69,7 +70,7 @@ func (s *IlmPolicyStatistics) UnmarshalJSON(data []byte) error { case "phases": if err := dec.Decode(&s.Phases); err != nil { - return err + return fmt.Errorf("%s | %w", "Phases", err) } } diff --git a/typedapi/types/impact.go b/typedapi/types/impact.go index 9bbf4d6b74..f236c64b0c 100644 --- a/typedapi/types/impact.go +++ b/typedapi/types/impact.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Impact type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L65-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L65-L70 type Impact struct { Description string `json:"description"` Id string `json:"id"` @@ -58,7 +59,7 @@ func (s *Impact) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *Impact) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *Impact) UnmarshalJSON(data []byte) error { case "impact_areas": if err := dec.Decode(&s.ImpactAreas); err != nil { - return err + return fmt.Errorf("%s | %w", "ImpactAreas", err) } case "severity": @@ -92,7 +93,7 @@ func (s *Impact) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Severity", err) } s.Severity = value case float64: diff --git a/typedapi/types/indexaction.go b/typedapi/types/indexaction.go index 4f99d6fc31..2bac2e6647 100644 --- a/typedapi/types/indexaction.go +++ b/typedapi/types/indexaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/optype" @@ -32,7 +33,7 @@ import ( // IndexAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L256-L265 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L256-L265 type IndexAction struct { DocId *string `json:"doc_id,omitempty"` ExecutionTimeField *string `json:"execution_time_field,omitempty"` @@ -59,32 +60,32 @@ func (s *IndexAction) UnmarshalJSON(data []byte) error { case "doc_id": if err := dec.Decode(&s.DocId); err != nil { - return err + return fmt.Errorf("%s | %w", "DocId", err) } case "execution_time_field": if err := dec.Decode(&s.ExecutionTimeField); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionTimeField", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "op_type": if err := dec.Decode(&s.OpType); err != nil { - return err + return fmt.Errorf("%s | %w", "OpType", err) } case "refresh": if err := dec.Decode(&s.Refresh); err != nil { - return err + return fmt.Errorf("%s | %w", "Refresh", err) } case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/types/indexaliases.go b/typedapi/types/indexaliases.go index 2929f2e0a7..9c0b919d1f 100644 --- a/typedapi/types/indexaliases.go +++ b/typedapi/types/indexaliases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexAliases type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_alias/IndicesGetAliasResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_alias/IndicesGetAliasResponse.ts#L36-L38 type IndexAliases struct { Aliases map[string]AliasDefinition `json:"aliases"` } diff --git a/typedapi/types/indexanddatastreamaction.go b/typedapi/types/indexanddatastreamaction.go index 2930bd2044..776298fc00 100644 --- a/typedapi/types/indexanddatastreamaction.go +++ b/typedapi/types/indexanddatastreamaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexAndDataStreamAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/modify_data_stream/types.ts#L39-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/modify_data_stream/types.ts#L39-L44 type IndexAndDataStreamAction struct { // DataStream Data stream targeted by the action. 
DataStream string `json:"data_stream"` @@ -54,12 +55,12 @@ func (s *IndexAndDataStreamAction) UnmarshalJSON(data []byte) error { case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } diff --git a/typedapi/types/indexcapabilities.go b/typedapi/types/indexcapabilities.go index 37d58317e9..df3aad1f74 100644 --- a/typedapi/types/indexcapabilities.go +++ b/typedapi/types/indexcapabilities.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexCapabilities type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_index_caps/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_index_caps/types.ts#L24-L26 type IndexCapabilities struct { RollupJobs []RollupJobSummary `json:"rollup_jobs"` } diff --git a/typedapi/types/indexdetails.go b/typedapi/types/indexdetails.go index 6ad753e66f..e2553b9c06 100644 --- a/typedapi/types/indexdetails.go +++ b/typedapi/types/indexdetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotIndexDetails.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotIndexDetails.ts#L23-L28 type IndexDetails struct { MaxSegmentsPerShard int64 `json:"max_segments_per_shard"` ShardCount int `json:"shard_count"` @@ -60,7 +61,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSegmentsPerShard", err) } s.MaxSegmentsPerShard = value case float64: @@ -76,7 +77,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardCount", err) } s.ShardCount = value case float64: @@ -86,7 +87,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "size_in_bytes": @@ -96,7 +97,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: diff --git a/typedapi/types/indexfield.go b/typedapi/types/indexfield.go index 25362c2eba..2e20603a35 100644 --- a/typedapi/types/indexfield.go +++ b/typedapi/types/indexfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L46-L48 type IndexField struct { Enabled bool `json:"enabled"` } @@ -57,7 +58,7 @@ func (s *IndexField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/indexhealthstats.go b/typedapi/types/indexhealthstats.go index d1f95948d4..436ec4cee6 100644 --- a/typedapi/types/indexhealthstats.go +++ b/typedapi/types/indexhealthstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IndexHealthStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/health/types.ts#L24-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/health/types.ts#L24-L34 type IndexHealthStats struct { ActivePrimaryShards int `json:"active_primary_shards"` ActiveShards int `json:"active_shards"` @@ -68,7 +69,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ActivePrimaryShards", err) } s.ActivePrimaryShards = value case float64: @@ -84,7 +85,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ActiveShards", err) } s.ActiveShards = value case float64: @@ -100,7 +101,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitializingShards", err) } s.InitializingShards = value case float64: @@ -116,7 +117,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfReplicas", err) } s.NumberOfReplicas = value case float64: @@ -132,7 +133,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfShards", err) } s.NumberOfShards = value case float64: @@ -148,7 +149,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelocatingShards", err) } s.RelocatingShards = value case float64: @@ -161,12 +162,12 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { s.Shards = make(map[string]ShardHealthStats, 0) } if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "unassigned_shards": @@ -177,7 +178,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedShards", err) } s.UnassignedShards = value case float64: diff --git a/typedapi/types/indexingpressurememorysummary.go b/typedapi/types/indexingpressurememorysummary.go index 2f8a291c4b..222bdf4dc1 100644 --- a/typedapi/types/indexingpressurememorysummary.go +++ b/typedapi/types/indexingpressurememorysummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexingPressureMemorySummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L580-L589 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L580-L589 type IndexingPressureMemorySummary struct { AllInBytes int64 `json:"all_in_bytes"` CombinedCoordinatingAndPrimaryInBytes int64 `json:"combined_coordinating_and_primary_in_bytes"` @@ -64,7 +65,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllInBytes", err) } s.AllInBytes = value case float64: @@ -79,7 +80,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CombinedCoordinatingAndPrimaryInBytes", err) } s.CombinedCoordinatingAndPrimaryInBytes = value case float64: @@ -94,7 +95,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CoordinatingInBytes", err) } s.CoordinatingInBytes = value case float64: @@ -109,7 +110,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CoordinatingRejections", err) } s.CoordinatingRejections = &value case float64: @@ -124,7 +125,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryInBytes", err) } s.PrimaryInBytes = value case float64: @@ -139,7 +140,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryRejections", err) } s.PrimaryRejections = &value case float64: @@ -154,7 +155,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReplicaInBytes", err) } s.ReplicaInBytes = value case float64: @@ -169,7 +170,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReplicaRejections", err) } s.ReplicaRejections = &value case float64: diff --git a/typedapi/types/indexingslowlogsettings.go b/typedapi/types/indexingslowlogsettings.go index 0523db2174..9a7f31f513 100644 --- a/typedapi/types/indexingslowlogsettings.go +++ b/typedapi/types/indexingslowlogsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexingSlowlogSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L550-L555 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L554-L559 type IndexingSlowlogSettings struct { Level *string `json:"level,omitempty"` Reformat *bool `json:"reformat,omitempty"` @@ -56,7 +57,7 @@ func (s *IndexingSlowlogSettings) UnmarshalJSON(data []byte) error { case "level": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Level", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *IndexingSlowlogSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Reformat", err) } s.Reformat = &value case bool: @@ -87,7 +88,7 @@ func (s *IndexingSlowlogSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } s.Source = &value case float64: @@ -97,7 +98,7 @@ func (s *IndexingSlowlogSettings) UnmarshalJSON(data []byte) error { case "threshold": if err := dec.Decode(&s.Threshold); err != nil { - return err + return fmt.Errorf("%s | %w", "Threshold", err) } } diff --git a/typedapi/types/indexingslowlogtresholds.go b/typedapi/types/indexingslowlogtresholds.go index edd90765d6..f979be5e2b 100644 --- a/typedapi/types/indexingslowlogtresholds.go +++ b/typedapi/types/indexingslowlogtresholds.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexingSlowlogTresholds type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L557-L564 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L561-L568 type IndexingSlowlogTresholds struct { // Index The indexing slow log, similar in functionality to the search slow log. The // log file name ends with `_index_indexing_slowlog.json`. diff --git a/typedapi/types/indexingstats.go b/typedapi/types/indexingstats.go index ee9a7ca82b..573e7c1804 100644 --- a/typedapi/types/indexingstats.go +++ b/typedapi/types/indexingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexingStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L143-L159 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L143-L159 type IndexingStats struct { DeleteCurrent int64 `json:"delete_current"` DeleteTime Duration `json:"delete_time,omitempty"` @@ -71,7 +72,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeleteCurrent", err) } s.DeleteCurrent = value case float64: @@ -81,12 +82,12 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case "delete_time": if err := dec.Decode(&s.DeleteTime); err != nil { - return err + return fmt.Errorf("%s | %w", "DeleteTime", err) } case "delete_time_in_millis": if err := dec.Decode(&s.DeleteTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "DeleteTimeInMillis", err) } case "delete_total": @@ -96,7 +97,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeleteTotal", err) } s.DeleteTotal = value case float64: @@ -111,7 +112,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCurrent", err) } s.IndexCurrent = value case float64: @@ -126,7 +127,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexFailed", err) } s.IndexFailed = value case float64: @@ -136,12 +137,12 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case "index_time": if err := dec.Decode(&s.IndexTime); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTime", err) } case "index_time_in_millis": if err := dec.Decode(&s.IndexTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTimeInMillis", err) } case "index_total": @@ -151,7 +152,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTotal", err) } s.IndexTotal = value case float64: @@ -166,7 +167,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsThrottled", err) } s.IsThrottled = value case bool: @@ -180,7 +181,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NoopUpdateTotal", err) } s.NoopUpdateTotal = value case float64: @@ -190,12 +191,12 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case "throttle_time": if err := dec.Decode(&s.ThrottleTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottleTime", err) } case "throttle_time_in_millis": if err := dec.Decode(&s.ThrottleTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottleTimeInMillis", err) } case "types": @@ -203,7 +204,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { s.Types = make(map[string]IndexingStats, 0) } if err := dec.Decode(&s.Types); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Types", err) } case "write_load": @@ -213,7 +214,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "WriteLoad", err) } f := Float64(value) s.WriteLoad = &f diff --git a/typedapi/types/indexmappingrecord.go b/typedapi/types/indexmappingrecord.go index 94fdfd9e25..37e731d8a2 100644 --- a/typedapi/types/indexmappingrecord.go +++ b/typedapi/types/indexmappingrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexMappingRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L28-L31 type IndexMappingRecord struct { Item *TypeMapping `json:"item,omitempty"` Mappings TypeMapping `json:"mappings"` diff --git a/typedapi/types/indexoperation.go b/typedapi/types/indexoperation.go index 56c250a2ef..a55524deb1 100644 --- a/typedapi/types/indexoperation.go +++ b/typedapi/types/indexoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IndexOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L132-L132 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L132-L132 type IndexOperation struct { // DynamicTemplates A map from the full name of fields to the name of dynamic templates. // Defaults to an empty map. 
@@ -81,12 +82,12 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { s.DynamicTemplates = make(map[string]string, 0) } if err := dec.Decode(&s.DynamicTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicTemplates", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "if_primary_term": @@ -96,7 +97,7 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IfPrimaryTerm", err) } s.IfPrimaryTerm = &value case float64: @@ -106,18 +107,18 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { case "if_seq_no": if err := dec.Decode(&s.IfSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "IfSeqNo", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "pipeline": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -133,7 +134,7 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequireAlias", err) } s.RequireAlias = &value case bool: @@ -142,17 +143,17 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/indexprivilegescheck.go b/typedapi/types/indexprivilegescheck.go index 2e2ee41485..49ba9615b9 100644 --- a/typedapi/types/indexprivilegescheck.go +++ b/typedapi/types/indexprivilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IndexPrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/types.ts#L33-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/types.ts#L33-L44 type IndexPrivilegesCheck struct { // AllowRestrictedIndices This needs to be set to true (default is false) if using wildcards or regexps // for patterns that cover restricted indices. 
@@ -71,7 +72,7 @@ func (s *IndexPrivilegesCheck) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowRestrictedIndices", err) } s.AllowRestrictedIndices = &value case bool: @@ -84,19 +85,19 @@ func (s *IndexPrivilegesCheck) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } s.Names = append(s.Names, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Names); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } } case "privileges": if err := dec.Decode(&s.Privileges); err != nil { - return err + return fmt.Errorf("%s | %w", "Privileges", err) } } diff --git a/typedapi/types/indexresult.go b/typedapi/types/indexresult.go index fd8b388c7f..15ff8756ac 100644 --- a/typedapi/types/indexresult.go +++ b/typedapi/types/indexresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L267-L269 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L267-L269 type IndexResult struct { Response IndexResultSummary `json:"response"` } diff --git a/typedapi/types/indexresultsummary.go b/typedapi/types/indexresultsummary.go index 2f8eb3f433..0f4cef7a55 100644 --- a/typedapi/types/indexresultsummary.go +++ b/typedapi/types/indexresultsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IndexResultSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L271-L277 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L271-L277 type IndexResultSummary struct { Created bool `json:"created"` Id string `json:"id"` @@ -63,7 +64,7 @@ func (s *IndexResultSummary) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Created", err) } s.Created = value case bool: @@ -72,22 +73,22 @@ func (s *IndexResultSummary) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "result": if err := dec.Decode(&s.Result); err != nil { - return err + return fmt.Errorf("%s | %w", "Result", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/indexrouting.go b/typedapi/types/indexrouting.go index 6549507167..d565ce8f39 100644 --- a/typedapi/types/indexrouting.go +++ b/typedapi/types/indexrouting.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L22-L25 type IndexRouting struct { Allocation *IndexRoutingAllocation `json:"allocation,omitempty"` Rebalance *IndexRoutingRebalance `json:"rebalance,omitempty"` diff --git a/typedapi/types/indexroutingallocation.go b/typedapi/types/indexroutingallocation.go index 8d551e95c2..4b03267421 100644 --- a/typedapi/types/indexroutingallocation.go +++ b/typedapi/types/indexroutingallocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // IndexRoutingAllocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L27-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L27-L32 type IndexRoutingAllocation struct { Disk *IndexRoutingAllocationDisk `json:"disk,omitempty"` Enable *indexroutingallocationoptions.IndexRoutingAllocationOptions `json:"enable,omitempty"` diff --git a/typedapi/types/indexroutingallocationdisk.go b/typedapi/types/indexroutingallocationdisk.go index 38aacdeadb..3fe62e8d87 100644 --- a/typedapi/types/indexroutingallocationdisk.go +++ b/typedapi/types/indexroutingallocationdisk.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexRoutingAllocationDisk type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L62-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L62-L64 type IndexRoutingAllocationDisk struct { ThresholdEnabled string `json:"threshold_enabled,omitempty"` } @@ -53,7 +54,7 @@ func (s *IndexRoutingAllocationDisk) UnmarshalJSON(data []byte) error { case "threshold_enabled": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ThresholdEnabled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/indexroutingallocationinclude.go b/typedapi/types/indexroutingallocationinclude.go index b793fd0321..52860351b0 100644 --- a/typedapi/types/indexroutingallocationinclude.go +++ b/typedapi/types/indexroutingallocationinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexRoutingAllocationInclude type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L52-L55 type IndexRoutingAllocationInclude struct { Id_ *string `json:"_id,omitempty"` TierPreference_ *string `json:"_tier_preference,omitempty"` @@ -53,13 +54,13 @@ func (s *IndexRoutingAllocationInclude) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_tier_preference": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TierPreference_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/indexroutingallocationinitialrecovery.go b/typedapi/types/indexroutingallocationinitialrecovery.go index 75b45c28f5..d46fb767f7 100644 --- a/typedapi/types/indexroutingallocationinitialrecovery.go +++ b/typedapi/types/indexroutingallocationinitialrecovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexRoutingAllocationInitialRecovery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L57-L59 type IndexRoutingAllocationInitialRecovery struct { Id_ *string `json:"_id,omitempty"` } @@ -51,7 +52,7 @@ func (s *IndexRoutingAllocationInitialRecovery) UnmarshalJSON(data []byte) error case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } } diff --git a/typedapi/types/indexroutingrebalance.go b/typedapi/types/indexroutingrebalance.go index 3b188c2001..b735daa040 100644 --- a/typedapi/types/indexroutingrebalance.go +++ b/typedapi/types/indexroutingrebalance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // IndexRoutingRebalance type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexRouting.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexRouting.ts#L34-L36 type IndexRoutingRebalance struct { Enable indexroutingrebalanceoptions.IndexRoutingRebalanceOptions `json:"enable"` } diff --git a/typedapi/types/indexsegment.go b/typedapi/types/indexsegment.go index 265ea36751..80ab5d5953 100644 --- a/typedapi/types/indexsegment.go +++ b/typedapi/types/indexsegment.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexSegment type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/segments/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/segments/types.ts#L24-L26 type IndexSegment struct { Shards map[string][]ShardsSegment `json:"shards"` } diff --git a/typedapi/types/indexsegmentsort.go b/typedapi/types/indexsegmentsort.go index ea125a4aa2..c3c479ff1f 100644 --- a/typedapi/types/indexsegmentsort.go +++ b/typedapi/types/indexsegmentsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/segmentsortmissing" @@ -33,7 +34,7 @@ import ( // IndexSegmentSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSegmentSort.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSegmentSort.ts#L22-L27 type IndexSegmentSort struct { Field []string `json:"field,omitempty"` Missing []segmentsortmissing.SegmentSortMissing `json:"missing,omitempty"` @@ -62,13 +63,13 @@ func (s *IndexSegmentSort) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } s.Field = append(s.Field, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } } @@ -78,13 +79,13 @@ func (s *IndexSegmentSort) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := &segmentsortmissing.SegmentSortMissing{} if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } s.Missing = append(s.Missing, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } } @@ -94,13 +95,13 @@ func (s *IndexSegmentSort) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := &segmentsortmode.SegmentSortMode{} if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } s.Mode = append(s.Mode, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } } @@ -110,13 +111,13 @@ func (s *IndexSegmentSort) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := &segmentsortorder.SegmentSortOrder{} if err := 
json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = append(s.Order, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } } diff --git a/typedapi/types/indexsettingblocks.go b/typedapi/types/indexsettingblocks.go index 92b6766339..0b817ba9dc 100644 --- a/typedapi/types/indexsettingblocks.go +++ b/typedapi/types/indexsettingblocks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexSettingBlocks type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L248-L254 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L252-L258 type IndexSettingBlocks struct { Metadata Stringifiedboolean `json:"metadata,omitempty"` Read Stringifiedboolean `json:"read,omitempty"` @@ -55,27 +56,27 @@ func (s *IndexSettingBlocks) UnmarshalJSON(data []byte) error { case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "read": if err := dec.Decode(&s.Read); err != nil { - return err + return fmt.Errorf("%s | %w", "Read", err) } case "read_only": if err := dec.Decode(&s.ReadOnly); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadOnly", err) } case "read_only_allow_delete": if err := dec.Decode(&s.ReadOnlyAllowDelete); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadOnlyAllowDelete", err) } case "write": if err := dec.Decode(&s.Write); err != nil { - return err + return fmt.Errorf("%s | %w", "Write", err) } } diff --git a/typedapi/types/indexsettings.go b/typedapi/types/indexsettings.go index 416fb93b31..7efa15cd84 100644 --- a/typedapi/types/indexsettings.go +++ b/typedapi/types/indexsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -33,7 +33,7 @@ import ( // IndexSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L69-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L69-L167 type IndexSettings struct { Analysis *IndexSettingsAnalysis `json:"analysis,omitempty"` // Analyze Settings to define analyzers, tokenizers, token filters and character @@ -88,9 +88,9 @@ type IndexSettings struct { Settings *IndexSettings `json:"settings,omitempty"` // Similarity Configure custom similarity settings to customize how search results are // scored. 
- Similarity *SettingsSimilarity `json:"similarity,omitempty"` - SoftDeletes *SoftDeletes `json:"soft_deletes,omitempty"` - Sort *IndexSegmentSort `json:"sort,omitempty"` + Similarity map[string]SettingsSimilarity `json:"similarity,omitempty"` + SoftDeletes *SoftDeletes `json:"soft_deletes,omitempty"` + Sort *IndexSegmentSort `json:"sort,omitempty"` // Store The store module allows you to control how index data is stored and accessed // on disk. Store *Storage `json:"store,omitempty"` @@ -119,18 +119,18 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "analysis": if err := dec.Decode(&s.Analysis); err != nil { - return err + return fmt.Errorf("%s | %w", "Analysis", err) } case "analyze": if err := dec.Decode(&s.Analyze); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyze", err) } case "auto_expand_replicas": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AutoExpandReplicas", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -141,18 +141,18 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "blocks": if err := dec.Decode(&s.Blocks); err != nil { - return err + return fmt.Errorf("%s | %w", "Blocks", err) } case "check_on_startup": if err := dec.Decode(&s.CheckOnStartup); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckOnStartup", err) } case "codec": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Codec", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,28 +163,28 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "creation_date": if err := dec.Decode(&s.CreationDate); err != nil { - return err + return fmt.Errorf("%s | %w", "CreationDate", err) } case "creation_date_string": if err := dec.Decode(&s.CreationDateString); err != nil { - return err + return fmt.Errorf("%s | %w", "CreationDateString", err) } case "default_pipeline": if err := dec.Decode(&s.DefaultPipeline); err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultPipeline", err) } case "final_pipeline": if err := dec.Decode(&s.FinalPipeline); err != nil { - return err + return fmt.Errorf("%s | %w", "FinalPipeline", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -195,13 +195,13 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "gc_deletes": if err := dec.Decode(&s.GcDeletes); err != nil { - return err + return fmt.Errorf("%s | %w", "GcDeletes", err) } case "hidden": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Hidden", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -212,27 +212,27 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "indexing_pressure": if err := dec.Decode(&s.IndexingPressure); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingPressure", err) } case "indexing.slowlog": if err := dec.Decode(&s.IndexingSlowlog); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingSlowlog", err) } case "lifecycle": if err := dec.Decode(&s.Lifecycle); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Lifecycle", err) } case "load_fixed_bitset_filters_eagerly": @@ -242,7 +242,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LoadFixedBitsetFiltersEagerly", err) } s.LoadFixedBitsetFiltersEagerly = &value case bool: @@ -251,7 +251,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "mapping": if err := dec.Decode(&s.Mapping); err != nil { - return err + return fmt.Errorf("%s | %w", "Mapping", err) } case "max_docvalue_fields_search": @@ -262,7 +262,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocvalueFieldsSearch", err) } s.MaxDocvalueFieldsSearch = &value case float64: @@ -278,7 +278,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxInnerResultWindow", err) } s.MaxInnerResultWindow = &value case float64: @@ -294,7 +294,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNgramDiff", err) } s.MaxNgramDiff = &value case float64: @@ -310,7 +310,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRefreshListeners", err) } s.MaxRefreshListeners = &value case float64: @@ -326,7 +326,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRegexLength", err) } s.MaxRegexLength = &value case float64: @@ -342,7 +342,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxRescoreWindow", err) } s.MaxRescoreWindow = &value case float64: @@ -358,7 +358,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxResultWindow", err) } s.MaxResultWindow = &value case float64: @@ -374,7 +374,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScriptFields", err) } s.MaxScriptFields = &value case float64: @@ -390,7 +390,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxShingleDiff", err) } s.MaxShingleDiff = &value case float64: @@ -406,7 +406,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSlicesPerScroll", err) } s.MaxSlicesPerScroll = &value case float64: @@ -422,7 +422,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTermsCount", err) } s.MaxTermsCount = &value case float64: @@ -432,13 +432,13 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "merge": if err := dec.Decode(&s.Merge); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Merge", err) } case "mode": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -450,7 +450,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "number_of_replicas": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfReplicas", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -467,7 +467,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfRoutingShards", err) } s.NumberOfRoutingShards = &value case float64: @@ -478,7 +478,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "number_of_shards": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfShards", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -490,7 +490,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "priority": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -501,32 +501,32 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "provided_name": if err := dec.Decode(&s.ProvidedName); err != nil { - return err + return fmt.Errorf("%s | %w", "ProvidedName", err) } case "queries": if err := dec.Decode(&s.Queries); err != nil { - return err + return fmt.Errorf("%s | %w", "Queries", err) } case "query_string": if err := dec.Decode(&s.QueryString); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryString", err) } case "refresh_interval": if err := dec.Decode(&s.RefreshInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshInterval", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "routing_partition_size": if err := dec.Decode(&s.RoutingPartitionSize); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingPartitionSize", err) } case "routing_path": @@ -535,49 +535,115 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingPath", err) } s.RoutingPath = append(s.RoutingPath, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.RoutingPath); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingPath", err) } } case "search": if err := dec.Decode(&s.Search); err != nil { - return err + return fmt.Errorf("%s | %w", "Search", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { - return err + if s.Similarity == nil { + s.Similarity = make(map[string]SettingsSimilarity, 0) + } + refs := make(map[string]json.RawMessage, 0) + dec.Decode(&refs) + for key, message := range refs { + kind := make(map[string]interface{}) + buf := bytes.NewReader(message) + localDec := json.NewDecoder(buf) + localDec.Decode(&kind) + buf.Seek(0, io.SeekStart) + + switch kind["type"] { + case "BM25": + oo := NewSettingsSimilarityBm25() + if err := 
localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "boolean": + oo := NewSettingsSimilarityBoolean() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "DFI": + oo := NewSettingsSimilarityDfi() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "DFR": + oo := NewSettingsSimilarityDfr() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "IB": + oo := NewSettingsSimilarityIb() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "LMDirichlet": + oo := NewSettingsSimilarityLmd() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "LMJelinekMercer": + oo := NewSettingsSimilarityLmj() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + case "scripted": + oo := NewSettingsSimilarityScripted() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + default: + oo := new(SettingsSimilarity) + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Similarity[key] = oo + } } case "soft_deletes": if err := dec.Decode(&s.SoftDeletes); err != nil { - return err + return fmt.Errorf("%s | %w", "SoftDeletes", err) } case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "store": if err := dec.Decode(&s.Store); err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } case "time_series": if err := dec.Decode(&s.TimeSeries); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeries", err) } case "top_metrics_max_size": @@ -588,7 +654,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TopMetricsMaxSize", err) } s.TopMetricsMaxSize = &value case float64: @@ -598,18 +664,18 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "translog": if err := dec.Decode(&s.Translog); err != nil { - return err + return fmt.Errorf("%s | %w", "Translog", err) } case "uuid": if err := dec.Decode(&s.Uuid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uuid", err) } case "verified_before_close": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "VerifiedBeforeClose", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -620,7 +686,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } default: @@ -631,7 +697,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexSettings", err) } s.IndexSettings[key] = *raw } @@ -674,6 +740,7 @@ func (s IndexSettings) MarshalJSON() ([]byte, error) { func NewIndexSettings() *IndexSettings { r := &IndexSettings{ IndexSettings: make(map[string]json.RawMessage, 0), + Similarity: make(map[string]SettingsSimilarity, 0), } return r diff --git a/typedapi/types/indexsettingsanalysis.go b/typedapi/types/indexsettingsanalysis.go index 9466f9b1d1..1f70691321 100644 --- a/typedapi/types/indexsettingsanalysis.go +++ b/typedapi/types/indexsettingsanalysis.go @@ -16,7 +16,7 @@ // under the License. 
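The IndexSettings.Similarity change above replaces the single *SettingsSimilarity pointer with a map keyed by similarity name, and the new decoding switch picks the concrete variant from the `type` discriminator ("BM25", "DFR", "scripted", ...). Below is a minimal sketch of what that looks like for a caller, assuming the github.com/elastic/go-elasticsearch/v8 module is on the module path; the "my_bm25" key and the k1/b parameters are illustrative values, not taken from this diff.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Custom similarities are now keyed by name, mirroring the
	// settings.index.similarity.<name> layout in Elasticsearch.
	raw := []byte(`{
		"similarity": {
			"my_bm25": { "type": "BM25", "k1": 1.3, "b": 0.6 }
		}
	}`)

	var s types.IndexSettings
	if err := json.Unmarshal(raw, &s); err != nil {
		panic(err)
	}

	// The "type" field selected the BM25 branch of the switch, so the map
	// entry should hold a *types.SettingsSimilarityBm25.
	if bm25, ok := s.Similarity["my_bm25"].(*types.SettingsSimilarityBm25); ok {
		fmt.Printf("decoded BM25 similarity: %+v\n", *bm25)
	} else {
		fmt.Printf("decoded as %T\n", s.Similarity["my_bm25"])
	}
}
```

Unrecognized `type` values fall through to the default branch of the switch and are kept as a generic SettingsSimilarity value instead of failing the whole settings decode.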
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,7 +29,7 @@ import ( // IndexSettingsAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L313-L319 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L317-L323 type IndexSettingsAnalysis struct { Analyzer map[string]Analyzer `json:"analyzer,omitempty"` CharFilter map[string]CharFilter `json:"char_filter,omitempty"` diff --git a/typedapi/types/indexsettingslifecycle.go b/typedapi/types/indexsettingslifecycle.go index 659edbf19e..8445843aa9 100644 --- a/typedapi/types/indexsettingslifecycle.go +++ b/typedapi/types/indexsettingslifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexSettingsLifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L270-L303 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L274-L307 type IndexSettingsLifecycle struct { // IndexingComplete Indicates whether or not the index has been rolled over. Automatically set to // true when ILM completes the rollover action. 
@@ -80,12 +81,12 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { case "indexing_complete": if err := dec.Decode(&s.IndexingComplete); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingComplete", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "origination_date": @@ -95,7 +96,7 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OriginationDate", err) } s.OriginationDate = &value case float64: @@ -110,7 +111,7 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ParseOriginationDate", err) } s.ParseOriginationDate = &value case bool: @@ -120,7 +121,7 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { case "rollover_alias": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RolloverAlias", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { case "step": if err := dec.Decode(&s.Step); err != nil { - return err + return fmt.Errorf("%s | %w", "Step", err) } } diff --git a/typedapi/types/indexsettingslifecyclestep.go b/typedapi/types/indexsettingslifecyclestep.go index 401ddcd629..d4e9bf9c99 100644 --- a/typedapi/types/indexsettingslifecyclestep.go +++ b/typedapi/types/indexsettingslifecyclestep.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexSettingsLifecycleStep type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L305-L311 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L309-L315 type IndexSettingsLifecycleStep struct { // WaitTimeThreshold Time to wait for the cluster to resolve allocation issues during an ILM // shrink action. Must be greater than 1h (1 hour). @@ -54,7 +55,7 @@ func (s *IndexSettingsLifecycleStep) UnmarshalJSON(data []byte) error { case "wait_time_threshold": if err := dec.Decode(&s.WaitTimeThreshold); err != nil { - return err + return fmt.Errorf("%s | %w", "WaitTimeThreshold", err) } } diff --git a/typedapi/types/indexsettingstimeseries.go b/typedapi/types/indexsettingstimeseries.go index 7a8c067098..01ca697e73 100644 --- a/typedapi/types/indexsettingstimeseries.go +++ b/typedapi/types/indexsettingstimeseries.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexSettingsTimeSeries type. 
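The recurring `return fmt.Errorf("%s | %w", "Field", err)` change in these decoders prefixes the name of the failing struct field while keeping the original error in the chain via %w. A minimal sketch against IndexSettingsLifecycle, assuming the same v8 typedapi/types package; the payload and its "not-a-number" value are made up for illustration.

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// origination_date is numeric in the spec, so a non-numeric string
	// makes the strconv.ParseInt branch shown in the diff fail.
	payload := []byte(`{"origination_date":"not-a-number"}`)

	var lc types.IndexSettingsLifecycle
	if err := json.Unmarshal(payload, &lc); err != nil {
		// The message now names the field, e.g.
		//   OriginationDate | strconv.ParseInt: parsing "not-a-number": invalid syntax
		fmt.Println(err)

		// Because %w wraps rather than flattens, the underlying error is
		// still reachable with the errors package.
		var numErr *strconv.NumError
		if errors.As(err, &numErr) {
			fmt.Println("offending value:", numErr.Num)
		}
	}
}
```

Callers that only check err != nil are unaffected; only the message text gains the field prefix.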
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L321-L324 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L325-L328 type IndexSettingsTimeSeries struct { EndTime DateTime `json:"end_time,omitempty"` StartTime DateTime `json:"start_time,omitempty"` @@ -52,12 +53,12 @@ func (s *IndexSettingsTimeSeries) UnmarshalJSON(data []byte) error { case "end_time": if err := dec.Decode(&s.EndTime); err != nil { - return err + return fmt.Errorf("%s | %w", "EndTime", err) } case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } } diff --git a/typedapi/types/indexstate.go b/typedapi/types/indexstate.go index fa1b984f23..f595429a8b 100644 --- a/typedapi/types/indexstate.go +++ b/typedapi/types/indexstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexState type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexState.ts#L27-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexState.ts#L27-L40 type IndexState struct { Aliases map[string]Alias `json:"aliases,omitempty"` DataStream *string `json:"data_stream,omitempty"` @@ -61,32 +62,32 @@ func (s *IndexState) UnmarshalJSON(data []byte) error { s.Aliases = make(map[string]Alias, 0) } if err := dec.Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "defaults": if err := dec.Decode(&s.Defaults); err != nil { - return err + return fmt.Errorf("%s | %w", "Defaults", err) } case "lifecycle": if err := dec.Decode(&s.Lifecycle); err != nil { - return err + return fmt.Errorf("%s | %w", "Lifecycle", err) } case "mappings": if err := dec.Decode(&s.Mappings); err != nil { - return err + return fmt.Errorf("%s | %w", "Mappings", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } } diff --git a/typedapi/types/indexstats.go b/typedapi/types/indexstats.go index 3e2aa8d9ee..07afdb7779 100644 --- a/typedapi/types/indexstats.go +++ b/typedapi/types/indexstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexStats type. 
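The same wrapping appears one level up in IndexState (the "settings" case above), and because dec.Decode on a nested field runs that field's own UnmarshalJSON, the prefixes compose into a breadcrumb from the outer struct down to the failing field. A small sketch under the same assumptions as the previous example; the "NaN" value is illustrative, and the exact message relies on encoding/json returning the inner unmarshaler's error unchanged.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The inner IndexSettings decoder fails on max_terms_count and the
	// outer IndexState decoder prepends its own field name, yielding
	// something like:
	//   Settings | MaxTermsCount | strconv.Atoi: parsing "NaN": invalid syntax
	raw := []byte(`{"settings":{"max_terms_count":"NaN"}}`)

	var st types.IndexState
	if err := json.Unmarshal(raw, &st); err != nil {
		fmt.Println(err)
	}
}
```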
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L52-L93 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L52-L93 type IndexStats struct { Bulk *BulkStats `json:"bulk,omitempty"` // Completion Contains statistics about completions across all shards assigned to the node. diff --git a/typedapi/types/indextemplate.go b/typedapi/types/indextemplate.go index 9fc9d52fae..32496b6c4c 100644 --- a/typedapi/types/indextemplate.go +++ b/typedapi/types/indextemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexTemplate.ts#L31-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexTemplate.ts#L31-L70 type IndexTemplate struct { AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` // ComposedOf An ordered list of component template names. @@ -85,7 +86,7 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowAutoCreate", err) } s.AllowAutoCreate = &value case bool: @@ -94,12 +95,12 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { case "composed_of": if err := dec.Decode(&s.ComposedOf); err != nil { - return err + return fmt.Errorf("%s | %w", "ComposedOf", err) } case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "index_patterns": @@ -108,19 +109,19 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } s.IndexPatterns = append(s.IndexPatterns, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "priority": @@ -130,7 +131,7 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } s.Priority = &value case float64: @@ -140,12 +141,12 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/indextemplatedatastreamconfiguration.go b/typedapi/types/indextemplatedatastreamconfiguration.go index 4f2b9fbd94..e03421fff8 
100644 --- a/typedapi/types/indextemplatedatastreamconfiguration.go +++ b/typedapi/types/indextemplatedatastreamconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexTemplateDataStreamConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexTemplate.ts#L72-L83 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexTemplate.ts#L72-L83 type IndexTemplateDataStreamConfiguration struct { // AllowCustomRouting If true, the data stream supports custom routing. AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` @@ -60,7 +61,7 @@ func (s *IndexTemplateDataStreamConfiguration) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowCustomRouting", err) } s.AllowCustomRouting = &value case bool: @@ -74,7 +75,7 @@ func (s *IndexTemplateDataStreamConfiguration) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Hidden", err) } s.Hidden = &value case bool: diff --git a/typedapi/types/indextemplateitem.go b/typedapi/types/indextemplateitem.go index bc52880eb0..afebfe0328 100644 --- a/typedapi/types/indextemplateitem.go +++ b/typedapi/types/indextemplateitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IndexTemplateItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L29-L32 type IndexTemplateItem struct { IndexTemplate IndexTemplate `json:"index_template"` Name string `json:"name"` @@ -52,12 +53,12 @@ func (s *IndexTemplateItem) UnmarshalJSON(data []byte) error { case "index_template": if err := dec.Decode(&s.IndexTemplate); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTemplate", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/indextemplatemapping.go b/typedapi/types/indextemplatemapping.go index a7c9cbbc89..5e0e3ca09c 100644 --- a/typedapi/types/indextemplatemapping.go +++ b/typedapi/types/indextemplatemapping.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexTemplateMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L97-L119 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L97-L119 type IndexTemplateMapping struct { // Aliases Aliases to add. // If the index template includes a `data_stream` object, these are data stream diff --git a/typedapi/types/indextemplatesummary.go b/typedapi/types/indextemplatesummary.go index 9cdeded39e..3777072b87 100644 --- a/typedapi/types/indextemplatesummary.go +++ b/typedapi/types/indextemplatesummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndexTemplateSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexTemplate.ts#L85-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexTemplate.ts#L85-L107 type IndexTemplateSummary struct { // Aliases Aliases to add. // If the index template includes a `data_stream` object, these are data stream diff --git a/typedapi/types/indexversioning.go b/typedapi/types/indexversioning.go index 7b7b28ef4d..30bf0646f6 100644 --- a/typedapi/types/indexversioning.go +++ b/typedapi/types/indexversioning.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndexVersioning type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L265-L268 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L269-L272 type IndexVersioning struct { Created *string `json:"created,omitempty"` CreatedString *string `json:"created_string,omitempty"` @@ -53,13 +54,13 @@ func (s *IndexVersioning) UnmarshalJSON(data []byte) error { case "created": if err := dec.Decode(&s.Created); err != nil { - return err + return fmt.Errorf("%s | %w", "Created", err) } case "created_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreatedString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/indicatornode.go b/typedapi/types/indicatornode.go index 9e4feb8a25..8441163096 100644 --- a/typedapi/types/indicatornode.go +++ b/typedapi/types/indicatornode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndicatorNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L90-L93 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L90-L93 type IndicatorNode struct { Name string `json:"name,omitempty"` NodeId string `json:"node_id,omitempty"` @@ -54,7 +55,7 @@ func (s *IndicatorNode) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *IndicatorNode) UnmarshalJSON(data []byte) error { case "node_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/indicators.go b/typedapi/types/indicators.go index 89320dae67..3eac14758a 100644 --- a/typedapi/types/indicators.go +++ b/typedapi/types/indicators.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Indicators type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L32-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L32-L40 type Indicators struct { Disk *DiskIndicator `json:"disk,omitempty"` Ilm *IlmIndicator `json:"ilm,omitempty"` diff --git a/typedapi/types/indices.go b/typedapi/types/indices.go index ddaefcb0e9..9099e22dd2 100644 --- a/typedapi/types/indices.go +++ b/typedapi/types/indices.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Indices type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L67-L67 type Indices []string diff --git a/typedapi/types/indicesaction.go b/typedapi/types/indicesaction.go index 6e9dd8c03f..1c95c915ff 100644 --- a/typedapi/types/indicesaction.go +++ b/typedapi/types/indicesaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndicesAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/update_aliases/types.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/update_aliases/types.ts#L23-L39 type IndicesAction struct { // Add Adds a data stream or index to an alias. // If the alias doesn’t exist, the `add` action creates it. diff --git a/typedapi/types/indicesblockstatus.go b/typedapi/types/indicesblockstatus.go index 04fbde5582..8faeda7405 100644 --- a/typedapi/types/indicesblockstatus.go +++ b/typedapi/types/indicesblockstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndicesBlockStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/add_block/IndicesAddBlockResponse.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/add_block/IndicesAddBlockResponse.ts#L30-L33 type IndicesBlockStatus struct { Blocked bool `json:"blocked"` Name string `json:"name"` @@ -58,7 +59,7 @@ func (s *IndicesBlockStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Blocked", err) } s.Blocked = value case bool: @@ -67,7 +68,7 @@ func (s *IndicesBlockStatus) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/indicesindexingpressure.go b/typedapi/types/indicesindexingpressure.go index 277f150a1c..4934ec6b4c 100644 --- a/typedapi/types/indicesindexingpressure.go +++ b/typedapi/types/indicesindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndicesIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L537-L539 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L541-L543 type IndicesIndexingPressure struct { Memory IndicesIndexingPressureMemory `json:"memory"` } diff --git a/typedapi/types/indicesindexingpressurememory.go b/typedapi/types/indicesindexingpressurememory.go index da105a9ba4..0625a4f355 100644 --- a/typedapi/types/indicesindexingpressurememory.go +++ b/typedapi/types/indicesindexingpressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndicesIndexingPressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L541-L548 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L545-L552 type IndicesIndexingPressureMemory struct { // Limit Number of outstanding bytes that may be consumed by indexing requests. 
When // this limit is reached or exceeded, @@ -63,7 +64,7 @@ func (s *IndicesIndexingPressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/indicesmodifyaction.go b/typedapi/types/indicesmodifyaction.go index 2d3b5bd509..aa23db5ef4 100644 --- a/typedapi/types/indicesmodifyaction.go +++ b/typedapi/types/indicesmodifyaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndicesModifyAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/modify_data_stream/types.ts#L22-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/modify_data_stream/types.ts#L22-L37 type IndicesModifyAction struct { // AddBackingIndex Adds an existing index as a backing index for a data stream. // The index is hidden as part of this operation. diff --git a/typedapi/types/indicesoptions.go b/typedapi/types/indicesoptions.go index 86107529de..1405f50f08 100644 --- a/typedapi/types/indicesoptions.go +++ b/typedapi/types/indicesoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IndicesOptions type. 
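The IndicesBlockStatus decoder above continues to accept `blocked` as either a JSON boolean or a stringified boolean (the `case string` / `case bool` switch); only the error wrapping changed. A quick sketch under the same module assumption; the index name is made up.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "true" as a string goes through the strconv.ParseBool branch shown
	// in the diff; a bare true would take the bool branch instead.
	raw := []byte(`{"name":"my-index","blocked":"true"}`)

	var b types.IndicesBlockStatus
	if err := json.Unmarshal(raw, &b); err != nil {
		panic(err)
	}
	fmt.Println(b.Name, b.Blocked) // my-index true
}
```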
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L332-L359 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L332-L359 type IndicesOptions struct { // AllowNoIndices If false, the request returns an error if any wildcard expression, index // alias, or `_all` value targets only @@ -75,7 +76,7 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowNoIndices", err) } s.AllowNoIndices = &value case bool: @@ -88,13 +89,13 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := &expandwildcard.ExpandWildcard{} if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpandWildcards", err) } s.ExpandWildcards = append(s.ExpandWildcards, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.ExpandWildcards); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpandWildcards", err) } } @@ -105,7 +106,7 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreThrottled", err) } s.IgnoreThrottled = &value case bool: @@ -119,7 +120,7 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnavailable", err) } s.IgnoreUnavailable = &value case bool: diff --git a/typedapi/types/indicesprivileges.go b/typedapi/types/indicesprivileges.go index c14b055681..f65c83ca74 100644 --- a/typedapi/types/indicesprivileges.go +++ b/typedapi/types/indicesprivileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IndicesPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L82-L105 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L82-L105 type IndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. 
Implicitly, restricted indices have limited @@ -77,7 +78,7 @@ func (s *IndicesPrivileges) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowRestrictedIndices", err) } s.AllowRestrictedIndices = &value case bool: @@ -86,7 +87,7 @@ func (s *IndicesPrivileges) UnmarshalJSON(data []byte) error { case "field_security": if err := dec.Decode(&s.FieldSecurity); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldSecurity", err) } case "names": @@ -95,24 +96,24 @@ func (s *IndicesPrivileges) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } s.Names = append(s.Names, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Names); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } } case "privileges": if err := dec.Decode(&s.Privileges); err != nil { - return err + return fmt.Errorf("%s | %w", "Privileges", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } } diff --git a/typedapi/types/indicesprivilegesquery.go b/typedapi/types/indicesprivilegesquery.go index 9e5d2afa0a..680a36b453 100644 --- a/typedapi/types/indicesprivilegesquery.go +++ b/typedapi/types/indicesprivilegesquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ package types // Query // RoleTemplateQuery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L131-L139 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L131-L139 type IndicesPrivilegesQuery interface{} diff --git a/typedapi/types/indicesrecord.go b/typedapi/types/indicesrecord.go index 33a872cafc..055ac451ee 100644 --- a/typedapi/types/indicesrecord.go +++ b/typedapi/types/indicesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndicesRecord type. 
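IndicesOptions.UnmarshalJSON above also keeps the single-value-or-array handling for expand_wildcards (the bytes.HasPrefix check for "["), so both forms end up in the same slice. A minimal sketch; "open" and "hidden" are standard expand_wildcards values, and the module assumption is the same as in the earlier examples.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A bare string and an array both populate ExpandWildcards.
	for _, raw := range []string{
		`{"expand_wildcards":"open"}`,
		`{"expand_wildcards":["open","hidden"]}`,
	} {
		var o types.IndicesOptions
		if err := json.Unmarshal([]byte(raw), &o); err != nil {
			panic(err)
		}
		fmt.Println(o.ExpandWildcards)
	}
}
```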
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/indices/types.ts#L20-L801 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/indices/types.ts#L20-L801 type IndicesRecord struct { // BulkAvgSizeInBytes average size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` @@ -336,7 +337,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkAvgSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -348,7 +349,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "bulk.avg_time", "bati", "bulkAvgTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkAvgTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -360,7 +361,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "bulk.total_operations", "bto", "bulkTotalOperation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalOperations", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -372,7 +373,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "bulk.total_size_in_bytes", "btsi", "bulkTotalSizeInBytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -384,7 +385,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "bulk.total_time", "btti", "bulkTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -396,7 +397,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "completion.size", "cs", "completionSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CompletionSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -408,7 +409,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "creation.date", "cd": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreationDate", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -420,7 +421,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "creation.date.string", "cds": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreationDateString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -432,7 +433,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "docs.count", "dc", "docsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -444,7 +445,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "docs.deleted", "dd", "docsDeleted": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocsDeleted", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -456,7 
+457,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "fielddata.evictions", "fe", "fielddataEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -468,7 +469,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "fielddata.memory_size", "fm", "fielddataMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -480,7 +481,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "flush.total", "ft", "flushTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -492,7 +493,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "flush.total_time", "ftt", "flushTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -504,7 +505,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.current", "gc", "getCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -516,7 +517,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.exists_time", "geti", "getExistsTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetExistsTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -528,7 +529,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.exists_total", "geto", "getExistsTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetExistsTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -540,7 +541,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.missing_time", "gmti", "getMissingTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetMissingTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -552,7 +553,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.missing_total", "gmto", "getMissingTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetMissingTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -564,7 +565,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.time", "gti", "getTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -576,7 +577,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "get.total", "gto", "getTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -588,7 +589,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "health", "h": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Health", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -600,7 +601,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "index", "i", "idx": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -612,7 +613,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_current", "idc", "indexingDeleteCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -624,7 +625,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_time", "idti", "indexingDeleteTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -636,7 +637,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_total", "idto", "indexingDeleteTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -648,7 +649,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_current", "iic", "indexingIndexCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -660,7 +661,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_failed", "iif", "indexingIndexFailed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexFailed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -672,7 +673,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_time", "iiti", "indexingIndexTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -684,7 +685,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_total", "iito", "indexingIndexTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -696,7 +697,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "memory.total", "tm", "memoryTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -708,7 +709,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.current", "mc", "mergesCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -720,7 +721,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.current_docs", "mcd", "mergesCurrentDocs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrentDocs", err) } o := string(tmp[:]) o, err = 
strconv.Unquote(o) @@ -732,7 +733,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.current_size", "mcs", "mergesCurrentSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrentSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -744,7 +745,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.total", "mt", "mergesTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -756,7 +757,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.total_docs", "mtd", "mergesTotalDocs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -768,7 +769,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.total_size", "mts", "mergesTotalSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -780,7 +781,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "merges.total_time", "mtt", "mergesTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -792,7 +793,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri", "p", "shards.primary", "shardsPrimary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pri", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -804,7 +805,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.bulk.avg_size_in_bytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriBulkAvgSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -816,7 +817,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.bulk.avg_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriBulkAvgTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -828,7 +829,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.bulk.total_operations": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriBulkTotalOperations", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -840,7 +841,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.bulk.total_size_in_bytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriBulkTotalSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -852,7 +853,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.bulk.total_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriBulkTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -864,7 +865,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.completion.size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return 
err + return fmt.Errorf("%s | %w", "PriCompletionSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -876,7 +877,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.fielddata.evictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriFielddataEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -888,7 +889,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.fielddata.memory_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriFielddataMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -900,7 +901,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.flush.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriFlushTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -912,7 +913,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.flush.total_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriFlushTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -924,7 +925,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -936,7 +937,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.exists_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetExistsTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -948,7 +949,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.exists_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetExistsTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -960,7 +961,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.missing_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetMissingTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -972,7 +973,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.missing_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetMissingTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -984,7 +985,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -996,7 +997,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.get.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriGetTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1008,7 +1009,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.delete_current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", 
"PriIndexingDeleteCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1020,7 +1021,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.delete_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriIndexingDeleteTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1032,7 +1033,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.delete_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriIndexingDeleteTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1044,7 +1045,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.index_current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriIndexingIndexCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1056,7 +1057,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.index_failed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriIndexingIndexFailed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1068,7 +1069,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.index_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriIndexingIndexTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1080,7 +1081,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.indexing.index_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriIndexingIndexTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1092,7 +1093,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.memory.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMemoryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1104,7 +1105,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMergesCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1116,7 +1117,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.current_docs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMergesCurrentDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1128,7 +1129,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.current_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMergesCurrentSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1140,7 +1141,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMergesTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1152,7 +1153,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.total_docs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - 
return err + return fmt.Errorf("%s | %w", "PriMergesTotalDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1164,7 +1165,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.total_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMergesTotalSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1176,7 +1177,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.merges.total_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriMergesTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1188,7 +1189,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.query_cache.evictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriQueryCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1200,7 +1201,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.query_cache.memory_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriQueryCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1212,7 +1213,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.refresh.external_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRefreshExternalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1224,7 +1225,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.refresh.external_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRefreshExternalTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1236,7 +1237,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.refresh.listeners": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRefreshListeners", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1248,7 +1249,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.refresh.time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRefreshTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1260,7 +1261,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.refresh.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRefreshTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1272,7 +1273,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.request_cache.evictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRequestCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1284,7 +1285,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.request_cache.hit_count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRequestCacheHitCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1296,7 +1297,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.request_cache.memory_size": var 
tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRequestCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1308,7 +1309,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.request_cache.miss_count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriRequestCacheMissCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1320,7 +1321,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.fetch_current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchFetchCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1332,7 +1333,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.fetch_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchFetchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1344,7 +1345,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.fetch_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchFetchTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1356,7 +1357,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.open_contexts": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchOpenContexts", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1368,7 +1369,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.query_current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchQueryCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1380,7 +1381,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.query_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchQueryTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1392,7 +1393,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.query_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchQueryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1404,7 +1405,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.scroll_current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchScrollCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1416,7 +1417,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.scroll_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchScrollTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1428,7 +1429,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.search.scroll_total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSearchScrollTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1440,7 +1441,7 @@ func (s *IndicesRecord) 
UnmarshalJSON(data []byte) error { case "pri.segments.count": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSegmentsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1452,7 +1453,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.segments.fixed_bitset_memory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSegmentsFixedBitsetMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1464,7 +1465,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.segments.index_writer_memory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSegmentsIndexWriterMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1476,7 +1477,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.segments.memory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSegmentsMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1488,7 +1489,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.segments.version_map_memory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSegmentsVersionMapMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1500,7 +1501,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.store.size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriStoreSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1512,7 +1513,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.suggest.current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSuggestCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1524,7 +1525,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.suggest.time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSuggestTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1536,7 +1537,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.suggest.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriSuggestTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1548,7 +1549,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.warmer.current": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriWarmerCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1560,7 +1561,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.warmer.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriWarmerTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1572,7 +1573,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "pri.warmer.total_time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PriWarmerTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1584,7 +1585,7 @@ func (s 
*IndicesRecord) UnmarshalJSON(data []byte) error { case "query_cache.evictions", "qce", "queryCacheEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1596,7 +1597,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "query_cache.memory_size", "qcm", "queryCacheMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1608,7 +1609,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "refresh.external_time", "reti": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshExternalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1620,7 +1621,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "refresh.external_total", "reto": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshExternalTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1632,7 +1633,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "refresh.listeners", "rli", "refreshListeners": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshListeners", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1644,7 +1645,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "refresh.time", "rti", "refreshTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1656,7 +1657,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "refresh.total", "rto", "refreshTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1668,7 +1669,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "rep", "r", "shards.replica", "shardsReplica": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Rep", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1680,7 +1681,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "request_cache.evictions", "rce", "requestCacheEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1692,7 +1693,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "request_cache.hit_count", "rchc", "requestCacheHitCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheHitCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1704,7 +1705,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "request_cache.memory_size", "rcm", "requestCacheMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1716,7 +1717,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { 
case "request_cache.miss_count", "rcmc", "requestCacheMissCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheMissCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1728,7 +1729,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.fetch_current", "sfc", "searchFetchCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1740,7 +1741,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.fetch_time", "sfti", "searchFetchTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1752,7 +1753,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.fetch_total", "sfto", "searchFetchTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1764,7 +1765,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.open_contexts", "so", "searchOpenContexts": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchOpenContexts", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1776,7 +1777,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.query_current", "sqc", "searchQueryCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1788,7 +1789,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.query_time", "sqti", "searchQueryTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1800,7 +1801,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.query_total", "sqto", "searchQueryTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1812,7 +1813,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.scroll_current", "scc", "searchScrollCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1824,7 +1825,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.scroll_time", "scti", "searchScrollTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1836,7 +1837,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "search.scroll_total", "scto", "searchScrollTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1848,7 +1849,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { 
case "search.throttled", "sth": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchThrottled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1860,7 +1861,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "segments.count", "sc", "segmentsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1872,7 +1873,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "segments.fixed_bitset_memory", "sfbm", "fixedBitsetMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsFixedBitsetMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1884,7 +1885,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "segments.index_writer_memory", "siwm", "segmentsIndexWriterMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsIndexWriterMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1896,7 +1897,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "segments.memory", "sm", "segmentsMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1908,7 +1909,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "segments.version_map_memory", "svmm", "segmentsVersionMapMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsVersionMapMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1920,7 +1921,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "status", "s": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1932,7 +1933,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "store.size", "ss", "storeSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StoreSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1944,7 +1945,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "suggest.current", "suc", "suggestCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1956,7 +1957,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "suggest.time", "suti", "suggestTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1968,7 +1969,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "suggest.total", "suto", "suggestTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1980,7 +1981,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "uuid", "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Uuid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1992,7 +1993,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "warmer.current", "wc", "warmerCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WarmerCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -2004,7 +2005,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "warmer.total", "wto", "warmerTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WarmerTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -2016,7 +2017,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { case "warmer.total_time", "wtt", "warmerTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WarmerTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/indicesshardsstats.go b/typedapi/types/indicesshardsstats.go index 01eeea400b..77aa471f9a 100644 --- a/typedapi/types/indicesshardsstats.go +++ b/typedapi/types/indicesshardsstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndicesShardsStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L49-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L49-L52 type IndicesShardsStats struct { AllFields FieldSummary `json:"all_fields"` Fields map[string]FieldSummary `json:"fields"` diff --git a/typedapi/types/indicesshardstats.go b/typedapi/types/indicesshardstats.go index 8f9bf5b12e..30ac2d0f3e 100644 --- a/typedapi/types/indicesshardstats.go +++ b/typedapi/types/indicesshardstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // IndicesShardStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L192-L223 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L192-L223 type IndicesShardStats struct { Bulk *BulkStats `json:"bulk,omitempty"` Commit *ShardCommit `json:"commit,omitempty"` diff --git a/typedapi/types/indicesshardstores.go b/typedapi/types/indicesshardstores.go index faa576913b..b1f6882fc2 100644 --- a/typedapi/types/indicesshardstores.go +++ b/typedapi/types/indicesshardstores.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IndicesShardStores type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L26-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L26-L28 type IndicesShardStores struct { Shards map[string]ShardStoreWrapper `json:"shards"` } diff --git a/typedapi/types/indicesstats.go b/typedapi/types/indicesstats.go index d99c2ff861..67ca9beb1e 100644 --- a/typedapi/types/indicesstats.go +++ b/typedapi/types/indicesstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus" @@ -32,7 +33,7 @@ import ( // IndicesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L95-L110 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L95-L110 type IndicesStats struct { Health *healthstatus.HealthStatus `json:"health,omitempty"` Primaries *IndexStats `json:"primaries,omitempty"` @@ -59,12 +60,12 @@ func (s *IndicesStats) UnmarshalJSON(data []byte) error { case "health": if err := dec.Decode(&s.Health); err != nil { - return err + return fmt.Errorf("%s | %w", "Health", err) } case "primaries": if err := dec.Decode(&s.Primaries); err != nil { - return err + return fmt.Errorf("%s | %w", "Primaries", err) } case "shards": @@ -72,22 +73,22 @@ func (s *IndicesStats) UnmarshalJSON(data []byte) error { s.Shards = make(map[string][]IndicesShardStats, 0) } if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } case "uuid": if err := dec.Decode(&s.Uuid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uuid", err) } } diff --git a/typedapi/types/indicesvalidationexplanation.go b/typedapi/types/indicesvalidationexplanation.go index 5eeab5d358..ed33f27d7c 100644 --- a/typedapi/types/indicesvalidationexplanation.go +++ b/typedapi/types/indicesvalidationexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndicesValidationExplanation type. 
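Reviewer note: most of the IndicesRecord hunks above sit inside the same decode idiom; the cat-style column is read into a json.RawMessage and then strconv.Unquote'd, so the value works whether the server sends a quoted string like "5mb" or a bare scalar. A simplified, standalone sketch of that idiom (the helper name is made up for illustration):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeLooseString accepts either a quoted JSON string ("5mb") or a bare
// scalar (1024) and returns its text form, mirroring the RawMessage +
// strconv.Unquote approach used for the cat columns.
func decodeLooseString(raw json.RawMessage) string {
	o := string(raw)
	if unq, err := strconv.Unquote(o); err == nil {
		return unq // value arrived as a quoted JSON string
	}
	return o // value was a number, bool, or null: keep the raw text
}

func main() {
	for _, in := range []string{`"5mb"`, `1024`, `null`} {
		fmt.Println(decodeLooseString(json.RawMessage(in)))
	}
	// Output: 5mb, 1024, null
}
```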
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L32-L37 type IndicesValidationExplanation struct { Error *string `json:"error,omitempty"` Explanation *string `json:"explanation,omitempty"` @@ -56,7 +57,7 @@ func (s *IndicesValidationExplanation) UnmarshalJSON(data []byte) error { case "error": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *IndicesValidationExplanation) UnmarshalJSON(data []byte) error { case "explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Explanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *IndicesValidationExplanation) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "valid": @@ -89,7 +90,7 @@ func (s *IndicesValidationExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Valid", err) } s.Valid = value case bool: diff --git a/typedapi/types/indicesversions.go b/typedapi/types/indicesversions.go index 8135cd2f9e..876174e0d3 100644 --- a/typedapi/types/indicesversions.go +++ b/typedapi/types/indicesversions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IndicesVersions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L263-L268 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L263-L268 type IndicesVersions struct { IndexCount int `json:"index_count"` PrimaryShardCount int `json:"primary_shard_count"` @@ -61,7 +62,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCount", err) } s.IndexCount = value case float64: @@ -77,7 +78,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryShardCount", err) } s.PrimaryShardCount = value case float64: @@ -92,7 +93,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalPrimaryBytes", err) } s.TotalPrimaryBytes = value case float64: @@ -102,7 +103,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/inferenceaggregate.go b/typedapi/types/inferenceaggregate.go index bf620c2659..8e9521d0b5 100644 --- a/typedapi/types/inferenceaggregate.go +++ b/typedapi/types/inferenceaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // InferenceAggregate type. 
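Reviewer note: the IndicesVersions and IndicesValidationExplanation hunks show the other recurring idiom; scalar fields accept either a native JSON number/bool or its quoted-string form, and with this change a parse failure names the field. A minimal sketch under those assumptions (function and field names here are illustrative, not part of the generated API):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// parseFlexibleInt mirrors the generated switch: a count may arrive as a
// JSON number or as a quoted string, and any parse failure is wrapped with
// the field name so the caller knows which field was malformed.
func parseFlexibleInt(field string, raw json.RawMessage) (int, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return 0, fmt.Errorf("%s | %w", field, err)
	}
	switch v := v.(type) {
	case string:
		n, err := strconv.Atoi(v)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", field, err)
		}
		return n, nil
	case float64:
		return int(v), nil
	default:
		return 0, fmt.Errorf("%s | unexpected JSON type %T", field, v)
	}
}

func main() {
	for _, in := range []string{`7`, `"7"`, `"seven"`} {
		n, err := parseFlexibleInt("IndexCount", json.RawMessage(in))
		fmt.Println(n, err)
	}
	// 7 <nil>
	// 7 <nil>
	// 0 IndexCount | strconv.Atoi: parsing "seven": invalid syntax
}
```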
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L659-L670 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L659-L670 type InferenceAggregate struct { Data map[string]json.RawMessage `json:"-"` FeatureImportance []InferenceFeatureImportance `json:"feature_importance,omitempty"` @@ -58,28 +58,28 @@ func (s *InferenceAggregate) UnmarshalJSON(data []byte) error { case "feature_importance": if err := dec.Decode(&s.FeatureImportance); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureImportance", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "top_classes": if err := dec.Decode(&s.TopClasses); err != nil { - return err + return fmt.Errorf("%s | %w", "TopClasses", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "warning": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Warning", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +96,7 @@ func (s *InferenceAggregate) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } s.Data[key] = *raw } diff --git a/typedapi/types/inferenceaggregation.go b/typedapi/types/inferenceaggregation.go index 09161f8a1d..4e718b4b60 100644 --- a/typedapi/types/inferenceaggregation.go +++ b/typedapi/types/inferenceaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // InferenceAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L205-L214 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L205-L214 type InferenceAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -67,13 +68,13 @@ func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,28 +85,28 @@ func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "inference_config": if err := dec.Decode(&s.InferenceConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceConfig", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "model_id": if err := dec.Decode(&s.ModelId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/inferenceclassimportance.go b/typedapi/types/inferenceclassimportance.go index 63bb58e84c..4a7d058853 100644 --- a/typedapi/types/inferenceclassimportance.go +++ b/typedapi/types/inferenceclassimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceClassImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L684-L687 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L684-L687 type InferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` @@ -54,7 +55,7 @@ func (s *InferenceClassImportance) UnmarshalJSON(data []byte) error { case "class_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *InferenceClassImportance) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Importance", err) } f := Float64(value) s.Importance = f diff --git a/typedapi/types/inferenceconfig.go b/typedapi/types/inferenceconfig.go index d829806c48..86cd84fa44 100644 --- a/typedapi/types/inferenceconfig.go +++ b/typedapi/types/inferenceconfig.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InferenceConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L746-L758 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L746-L758 type InferenceConfig struct { // Classification Classification configuration for inference. Classification *InferenceConfigClassification `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigclassification.go b/typedapi/types/inferenceconfigclassification.go index d897426d3f..0eac6c41f2 100644 --- a/typedapi/types/inferenceconfigclassification.go +++ b/typedapi/types/inferenceconfigclassification.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceConfigClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L773-L799 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L773-L799 type InferenceConfigClassification struct { // NumTopClasses Specifies the number of top class predictions to return. NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -69,7 +70,7 @@ func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -85,7 +86,7 @@ func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -96,7 +97,7 @@ func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { case "prediction_field_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionFieldType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,12 +108,12 @@ func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { case "results_field": if err := dec.Decode(&s.ResultsField); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } case "top_classes_results_field": if err := dec.Decode(&s.TopClassesResultsField); err != nil { - return err + return fmt.Errorf("%s | %w", "TopClassesResultsField", err) } } diff --git a/typedapi/types/inferenceconfigcontainer.go b/typedapi/types/inferenceconfigcontainer.go index b1ef2bdb97..6a5e6bf519 100644 --- a/typedapi/types/inferenceconfigcontainer.go +++ b/typedapi/types/inferenceconfigcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InferenceConfigContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L216-L222 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L216-L222 type InferenceConfigContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigcreatecontainer.go b/typedapi/types/inferenceconfigcreatecontainer.go index da80aa6700..5a19fed9a6 100644 --- a/typedapi/types/inferenceconfigcreatecontainer.go +++ b/typedapi/types/inferenceconfigcreatecontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InferenceConfigCreateContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L23-L80 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L23-L80 type InferenceConfigCreateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigregression.go b/typedapi/types/inferenceconfigregression.go index b9f7e96b15..be7b49612b 100644 --- a/typedapi/types/inferenceconfigregression.go +++ b/typedapi/types/inferenceconfigregression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceConfigRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L760-L771 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L760-L771 type InferenceConfigRegression struct { // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. 
NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` @@ -62,7 +63,7 @@ func (s *InferenceConfigRegression) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -72,7 +73,7 @@ func (s *InferenceConfigRegression) UnmarshalJSON(data []byte) error { case "results_field": if err := dec.Decode(&s.ResultsField); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } } diff --git a/typedapi/types/inferenceconfigupdatecontainer.go b/typedapi/types/inferenceconfigupdatecontainer.go index 3153d328bd..7c8d591785 100644 --- a/typedapi/types/inferenceconfigupdatecontainer.go +++ b/typedapi/types/inferenceconfigupdatecontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InferenceConfigUpdateContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L296-L318 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L296-L318 type InferenceConfigUpdateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferencefeatureimportance.go b/typedapi/types/inferencefeatureimportance.go index 51843dffc5..fddc7f385f 100644 --- a/typedapi/types/inferencefeatureimportance.go +++ b/typedapi/types/inferencefeatureimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceFeatureImportance type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L678-L682 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L678-L682 type InferenceFeatureImportance struct { Classes []InferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` @@ -54,13 +55,13 @@ func (s *InferenceFeatureImportance) UnmarshalJSON(data []byte) error { case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *InferenceFeatureImportance) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Importance", err) } f := Float64(value) s.Importance = &f diff --git a/typedapi/types/inferenceprocessor.go b/typedapi/types/inferenceprocessor.go index 48a973c658..9262be8992 100644 --- a/typedapi/types/inferenceprocessor.go +++ b/typedapi/types/inferenceprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L725-L744 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L725-L744 type InferenceProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -74,7 +75,7 @@ func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,13 +89,13 @@ func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { s.FieldMap = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.FieldMap); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldMap", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -110,7 +111,7 @@ func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -119,23 +120,23 @@ func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { case "inference_config": if err := dec.Decode(&s.InferenceConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceConfig", err) } case "model_id": if err := dec.Decode(&s.ModelId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -146,7 +147,7 @@ func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/inferenceresponseresult.go b/typedapi/types/inferenceresponseresult.go index 894eaa3894..ed0c606a40 100644 --- a/typedapi/types/inferenceresponseresult.go +++ b/typedapi/types/inferenceresponseresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceResponseResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L459-L506 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L459-L506 type InferenceResponseResult struct { // Entities If the model is trained for named entity recognition (NER) tasks, the // response contains the recognized entities. 
@@ -89,12 +90,12 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { case "entities": if err := dec.Decode(&s.Entities); err != nil { - return err + return fmt.Errorf("%s | %w", "Entities", err) } case "feature_importance": if err := dec.Decode(&s.FeatureImportance); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureImportance", err) } case "is_truncated": @@ -104,7 +105,7 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsTruncated", err) } s.IsTruncated = &value case bool: @@ -113,13 +114,13 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { case "predicted_value": if err := dec.Decode(&s.PredictedValue); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedValue", err) } case "predicted_value_sequence": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PredictedValueSequence", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,7 +136,7 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionProbability", err) } f := Float64(value) s.PredictionProbability = &f @@ -151,7 +152,7 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PredictionScore", err) } f := Float64(value) s.PredictionScore = &f @@ -162,13 +163,13 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { case "top_classes": if err := dec.Decode(&s.TopClasses); err != nil { - return err + return fmt.Errorf("%s | %w", "TopClasses", err) } case "warning": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Warning", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/inferenceresult.go b/typedapi/types/inferenceresult.go index a1c9c0132f..569b40cf28 100644 --- a/typedapi/types/inferenceresult.go +++ b/typedapi/types/inferenceresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InferenceResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Results.ts#L59-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Results.ts#L59-L67 type InferenceResult struct { SparseEmbedding []SparseEmbeddingResult `json:"sparse_embedding,omitempty"` TextEmbedding []TextEmbeddingResult `json:"text_embedding,omitempty"` diff --git a/typedapi/types/inferencetopclassentry.go b/typedapi/types/inferencetopclassentry.go index e2f31f99da..25688dfeab 100644 --- a/typedapi/types/inferencetopclassentry.go +++ b/typedapi/types/inferencetopclassentry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InferenceTopClassEntry type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L672-L676 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L672-L676 type InferenceTopClassEntry struct { ClassName FieldValue `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -54,7 +55,7 @@ func (s *InferenceTopClassEntry) UnmarshalJSON(data []byte) error { case "class_name": if err := dec.Decode(&s.ClassName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } case "class_probability": @@ -64,7 +65,7 @@ func (s *InferenceTopClassEntry) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ClassProbability", err) } f := Float64(value) s.ClassProbability = f @@ -80,7 +81,7 @@ func (s *InferenceTopClassEntry) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ClassScore", err) } f := Float64(value) s.ClassScore = f diff --git a/typedapi/types/influence.go b/typedapi/types/influence.go index 917fc9b1a9..da4d8d92f0 100644 --- a/typedapi/types/influence.go +++ b/typedapi/types/influence.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Influence type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Anomaly.ts#L140-L143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Anomaly.ts#L140-L143 type Influence struct { InfluencerFieldName string `json:"influencer_field_name"` InfluencerFieldValues []string `json:"influencer_field_values"` @@ -54,7 +55,7 @@ func (s *Influence) UnmarshalJSON(data []byte) error { case "influencer_field_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "InfluencerFieldName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *Influence) UnmarshalJSON(data []byte) error { case "influencer_field_values": if err := dec.Decode(&s.InfluencerFieldValues); err != nil { - return err + return fmt.Errorf("%s | %w", "InfluencerFieldValues", err) } } diff --git a/typedapi/types/influencer.go b/typedapi/types/influencer.go index d4a2c12f8f..59a3456f21 100644 --- a/typedapi/types/influencer.go +++ b/typedapi/types/influencer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Influencer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Influencer.ts#L31-L83 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Influencer.ts#L31-L83 type Influencer struct { // BucketSpan The length of the bucket in seconds. This value matches the bucket span that // is specified in the job. @@ -90,13 +91,13 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "foo": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Foo", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,13 +108,13 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case "influencer_field_name": if err := dec.Decode(&s.InfluencerFieldName); err != nil { - return err + return fmt.Errorf("%s | %w", "InfluencerFieldName", err) } case "influencer_field_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "InfluencerFieldValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,7 +130,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InfluencerScore", err) } f := Float64(value) s.InfluencerScore = f @@ -145,7 +146,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitialInfluencerScore", err) } f := Float64(value) s.InitialInfluencerScore = f @@ -161,7 +162,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsInterim", err) } s.IsInterim = value case bool: @@ -170,7 +171,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "probability": @@ -180,7 +181,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Probability", err) } f := Float64(value) s.Probability = f @@ -192,7 +193,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case "result_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -203,7 +204,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/infofeaturestate.go b/typedapi/types/infofeaturestate.go index 8b4fc76607..eea230cc96 100644 --- a/typedapi/types/infofeaturestate.go +++ b/typedapi/types/infofeaturestate.go @@ 
-16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InfoFeatureState type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotInfoFeatureState.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotInfoFeatureState.ts#L22-L25 type InfoFeatureState struct { FeatureName string `json:"feature_name"` Indices []string `json:"indices"` @@ -54,7 +55,7 @@ func (s *InfoFeatureState) UnmarshalJSON(data []byte) error { case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,13 +70,13 @@ func (s *InfoFeatureState) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } diff --git a/typedapi/types/ingestpipeline.go b/typedapi/types/ingestpipeline.go index 68d7ed4fdd..1e64160656 100644 --- a/typedapi/types/ingestpipeline.go +++ b/typedapi/types/ingestpipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,19 +24,20 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IngestPipeline type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Pipeline.ts#L23-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Pipeline.ts#L23-L45 type IngestPipeline struct { // Description Description of the ingest pipeline. Description *string `json:"description,omitempty"` // Meta_ Arbitrary metadata about the ingest pipeline. This map is not automatically // generated by Elasticsearch. - Meta_ Metadata `json:"_meta"` + Meta_ Metadata `json:"_meta,omitempty"` // OnFailure Processors to run immediately after a processor failure. OnFailure []ProcessorContainer `json:"on_failure,omitempty"` // Processors Processors used to perform transformations on documents before indexing. 
@@ -64,7 +65,7 @@ func (s *IngestPipeline) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,22 +76,22 @@ func (s *IngestPipeline) UnmarshalJSON(data []byte) error { case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "processors": if err := dec.Decode(&s.Processors); err != nil { - return err + return fmt.Errorf("%s | %w", "Processors", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/ingesttotal.go b/typedapi/types/ingesttotal.go index f66d6efbd6..668ec1acb7 100644 --- a/typedapi/types/ingesttotal.go +++ b/typedapi/types/ingesttotal.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IngestTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L356-L377 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L356-L377 type IngestTotal struct { // Count Total number of documents ingested during the lifetime of this node. Count *int64 `json:"count,omitempty"` @@ -67,7 +68,7 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -82,7 +83,7 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } s.Current = &value case float64: @@ -97,7 +98,7 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = &value case float64: @@ -107,12 +108,12 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { case "processors": if err := dec.Decode(&s.Processors); err != nil { - return err + return fmt.Errorf("%s | %w", "Processors", err) } case "time_in_millis": if err := dec.Decode(&s.TimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInMillis", err) } } diff --git a/typedapi/types/inlineget.go b/typedapi/types/inlineget.go index fc7da88581..e31088db74 100644 --- a/typedapi/types/inlineget.go +++ b/typedapi/types/inlineget.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // InlineGet type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L321-L330 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L321-L330 type InlineGet struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -62,7 +62,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "found": @@ -72,7 +72,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Found", err) } s.Found = value case bool: @@ -86,7 +86,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = &value case float64: @@ -96,17 +96,17 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { case "_routing": if err := dec.Decode(&s.Routing_); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } case "_seq_no": if err := dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } default: @@ -117,7 +117,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } s.Metadata[key] = *raw } diff --git a/typedapi/types/inlinegetdictuserdefined.go b/typedapi/types/inlinegetdictuserdefined.go index 96dcb9307e..7e83ee372f 100644 --- a/typedapi/types/inlinegetdictuserdefined.go +++ b/typedapi/types/inlinegetdictuserdefined.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // InlineGetDictUserDefined type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L321-L330 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L321-L330 type InlineGetDictUserDefined struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -39,7 +39,7 @@ type InlineGetDictUserDefined struct { PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Routing_ *string `json:"_routing,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` - Source_ map[string]json.RawMessage `json:"_source"` + Source_ map[string]json.RawMessage `json:"_source,omitempty"` } func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { @@ -62,7 +62,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "found": @@ -72,7 +72,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Found", err) } s.Found = value case bool: @@ -86,7 +86,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = &value case float64: @@ -96,12 +96,12 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { case "_routing": if err := dec.Decode(&s.Routing_); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } case "_seq_no": if err := dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } case "_source": @@ -109,7 +109,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { s.Source_ = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } default: @@ -120,7 +120,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "InlineGetDictUserDefined", err) } s.InlineGetDictUserDefined[key] = *raw } diff --git a/typedapi/types/inlinescript.go b/typedapi/types/inlinescript.go index a2c0372f53..465e684c25 100644 --- a/typedapi/types/inlinescript.go +++ b/typedapi/types/inlinescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // InlineScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Scripting.ts#L67-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Scripting.ts#L67-L79 type InlineScript struct { // Lang Specifies the language the script is written in. 
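The _meta and _source struct tags above (IngestPipeline.Meta_ and InlineGetDictUserDefined.Source_) gain omitempty, so a nil map is now dropped from the marshaled body instead of being sent as an explicit null. A small sketch of the difference, using an invented stand-in type rather than the generated one:

package main

import (
	"encoding/json"
	"fmt"
)

// pipelineSketch stands in for the generated type; only the JSON tag matters here.
type pipelineSketch struct {
	Meta map[string]json.RawMessage `json:"_meta,omitempty"` // previously `json:"_meta"`
}

func main() {
	before, _ := json.Marshal(struct {
		Meta map[string]json.RawMessage `json:"_meta"`
	}{})
	after, _ := json.Marshal(pipelineSketch{})

	fmt.Println(string(before)) // {"_meta":null}
	fmt.Println(string(after))  // {}
}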
Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` @@ -70,7 +71,7 @@ func (s *InlineScript) UnmarshalJSON(data []byte) error { case "lang": if err := dec.Decode(&s.Lang); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } case "options": @@ -78,7 +79,7 @@ func (s *InlineScript) UnmarshalJSON(data []byte) error { s.Options = make(map[string]string, 0) } if err := dec.Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } case "params": @@ -86,13 +87,13 @@ func (s *InlineScript) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/innerhits.go b/typedapi/types/innerhits.go index d23c74f4cd..186d464433 100644 --- a/typedapi/types/innerhits.go +++ b/typedapi/types/innerhits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InnerHits type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L106-L140 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L106-L140 type InnerHits struct { Collapse *FieldCollapse `json:"collapse,omitempty"` DocvalueFields []FieldAndFormat `json:"docvalue_fields,omitempty"` @@ -49,11 +50,11 @@ type InnerHits struct { Size *int `json:"size,omitempty"` // Sort How the inner hits should be sorted per `inner_hits`. // By default, inner hits are sorted by score. 
- Sort []SortCombinations `json:"sort,omitempty"` - Source_ SourceConfig `json:"_source,omitempty"` - StoredField []string `json:"stored_field,omitempty"` - TrackScores *bool `json:"track_scores,omitempty"` - Version *bool `json:"version,omitempty"` + Sort []SortCombinations `json:"sort,omitempty"` + Source_ SourceConfig `json:"_source,omitempty"` + StoredFields []string `json:"stored_fields,omitempty"` + TrackScores *bool `json:"track_scores,omitempty"` + Version *bool `json:"version,omitempty"` } func (s *InnerHits) UnmarshalJSON(data []byte) error { @@ -73,12 +74,12 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case "collapse": if err := dec.Decode(&s.Collapse); err != nil { - return err + return fmt.Errorf("%s | %w", "Collapse", err) } case "docvalue_fields": if err := dec.Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "explain": @@ -88,7 +89,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -101,13 +102,13 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } @@ -119,7 +120,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -129,7 +130,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "ignore_unmapped": @@ -139,7 +140,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -148,7 +149,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "script_fields": @@ -156,7 +157,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "seq_no_primary_term": @@ -166,7 +167,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoPrimaryTerm", err) } s.SeqNoPrimaryTerm = &value case bool: @@ -181,7 +182,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -195,34 +196,34 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } 
s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } - case "stored_field": + case "stored_fields": rawMsg := json.RawMessage{} dec.Decode(&rawMsg) if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } - s.StoredField = append(s.StoredField, *o) + s.StoredFields = append(s.StoredFields, *o) } else { - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredField); err != nil { - return err + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { + return fmt.Errorf("%s | %w", "StoredFields", err) } } @@ -233,7 +234,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrackScores", err) } s.TrackScores = &value case bool: @@ -247,7 +248,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } s.Version = &value case bool: diff --git a/typedapi/types/innerhitsresult.go b/typedapi/types/innerhitsresult.go index f987ad3202..f1a4bf42c2 100644 --- a/typedapi/types/innerhitsresult.go +++ b/typedapi/types/innerhitsresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InnerHitsResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L84-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L84-L86 type InnerHitsResult struct { Hits *HitsMetadata `json:"hits,omitempty"` } diff --git a/typedapi/types/inprogress.go b/typedapi/types/inprogress.go index 4da7f65f62..9085f61abb 100644 --- a/typedapi/types/inprogress.go +++ b/typedapi/types/inprogress.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // InProgress type. 
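The InnerHits change above corrects the field StoredField and its JSON key stored_field to the plural StoredFields / stored_fields, so callers that set the old field need a one-line update. Hypothetical usage, assuming the usual v8 module path for the typed API:

package main

import (
	"fmt"

	// Import path assumed from the v8 client layout; adjust to your go.mod.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Before this change: types.InnerHits{StoredField: []string{"title"}}, serialized as "stored_field".
	hits := types.InnerHits{
		StoredFields: []string{"title", "author"},
	}
	fmt.Println(hits.StoredFields)
}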
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L131-L136 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L131-L136 type InProgress struct { Name string `json:"name"` StartTimeMillis int64 `json:"start_time_millis"` @@ -55,18 +56,18 @@ func (s *InProgress) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "start_time_millis": if err := dec.Decode(&s.StartTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeMillis", err) } case "state": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *InProgress) UnmarshalJSON(data []byte) error { case "uuid": if err := dec.Decode(&s.Uuid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uuid", err) } } diff --git a/typedapi/types/input.go b/typedapi/types/input.go index 2d3b487b4b..87ec622736 100644 --- a/typedapi/types/input.go +++ b/typedapi/types/input.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Input type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L56-L58 type Input struct { FieldNames []string `json:"field_names"` } @@ -55,13 +56,13 @@ func (s *Input) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldNames", err) } s.FieldNames = append(s.FieldNames, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.FieldNames); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldNames", err) } } diff --git a/typedapi/types/integernumberproperty.go b/typedapi/types/integernumberproperty.go index df6d6d970e..31cac16c3e 100644 --- a/typedapi/types/integernumberproperty.go +++ b/typedapi/types/integernumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // IntegerNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L149-L152 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L149-L152 type IntegerNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,7 +499,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -509,7 +510,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } s.NullValue = &value case float64: @@ -519,7 +520,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -832,7 +833,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -841,7 +842,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -850,7 +851,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -858,7 +859,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -868,7 +869,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -884,7 +885,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -898,7 +899,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -907,12 +908,12 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/integerrangeproperty.go b/typedapi/types/integerrangeproperty.go index f103aef9d4..dc26d2a175 100644 --- a/typedapi/types/integerrangeproperty.go +++ b/typedapi/types/integerrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IntegerRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/range.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/range.ts#L42-L44 type IntegerRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -72,7 +73,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -101,13 +102,13 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -118,7 +119,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -127,7 +128,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -445,7 +446,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -460,7 +461,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -472,7 +473,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -785,7 +786,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -801,7 +802,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -810,7 +811,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/intervals.go b/typedapi/types/intervals.go index 0d86f94dcc..03a2bccf5c 100644 --- a/typedapi/types/intervals.go +++ b/typedapi/types/intervals.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Intervals type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L83-L110 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L83-L110 type Intervals struct { // AllOf Returns matches that span a combination of other rules. AllOf *IntervalsAllOf `json:"all_of,omitempty"` diff --git a/typedapi/types/intervalsallof.go b/typedapi/types/intervalsallof.go index 26a219f2b1..b2d9951b2f 100644 --- a/typedapi/types/intervalsallof.go +++ b/typedapi/types/intervalsallof.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IntervalsAllOf type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L50-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L50-L70 type IntervalsAllOf struct { // Filter Rule used to filter returned intervals. Filter *IntervalsFilter `json:"filter,omitempty"` @@ -63,12 +64,12 @@ func (s *IntervalsAllOf) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "intervals": if err := dec.Decode(&s.Intervals); err != nil { - return err + return fmt.Errorf("%s | %w", "Intervals", err) } case "max_gaps": @@ -79,7 +80,7 @@ func (s *IntervalsAllOf) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxGaps", err) } s.MaxGaps = &value case float64: @@ -94,7 +95,7 @@ func (s *IntervalsAllOf) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Ordered", err) } s.Ordered = &value case bool: diff --git a/typedapi/types/intervalsanyof.go b/typedapi/types/intervalsanyof.go index a63bc6ce82..6d08f267fa 100644 --- a/typedapi/types/intervalsanyof.go +++ b/typedapi/types/intervalsanyof.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IntervalsAnyOf type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L72-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L72-L81 type IntervalsAnyOf struct { // Filter Rule used to filter returned intervals. Filter *IntervalsFilter `json:"filter,omitempty"` diff --git a/typedapi/types/intervalsfilter.go b/typedapi/types/intervalsfilter.go index a95294c06d..a228b2ff3c 100644 --- a/typedapi/types/intervalsfilter.go +++ b/typedapi/types/intervalsfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IntervalsFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L112-L152 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L112-L152 type IntervalsFilter struct { // After Query used to return intervals that follow an interval from the `filter` // rule. @@ -77,48 +78,48 @@ func (s *IntervalsFilter) UnmarshalJSON(data []byte) error { case "after": if err := dec.Decode(&s.After); err != nil { - return err + return fmt.Errorf("%s | %w", "After", err) } case "before": if err := dec.Decode(&s.Before); err != nil { - return err + return fmt.Errorf("%s | %w", "Before", err) } case "contained_by": if err := dec.Decode(&s.ContainedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "ContainedBy", err) } case "containing": if err := dec.Decode(&s.Containing); err != nil { - return err + return fmt.Errorf("%s | %w", "Containing", err) } case "not_contained_by": if err := dec.Decode(&s.NotContainedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "NotContainedBy", err) } case "not_containing": if err := dec.Decode(&s.NotContaining); err != nil { - return err + return fmt.Errorf("%s | %w", "NotContaining", err) } case "not_overlapping": if err := dec.Decode(&s.NotOverlapping); err != nil { - return err + return fmt.Errorf("%s | %w", "NotOverlapping", err) } case "overlapping": if err := dec.Decode(&s.Overlapping); err != nil { - return err + return fmt.Errorf("%s | %w", "Overlapping", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -127,7 +128,7 @@ func (s *IntervalsFilter) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -136,7 +137,7 @@ func (s *IntervalsFilter) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -144,7 +145,7 @@ func (s *IntervalsFilter) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/intervalsfuzzy.go b/typedapi/types/intervalsfuzzy.go index b728db2859..da7d00cf11 100644 --- a/typedapi/types/intervalsfuzzy.go +++ b/typedapi/types/intervalsfuzzy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IntervalsFuzzy type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L154-L184 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L154-L184 type IntervalsFuzzy struct { // Analyzer Analyzer used to normalize the term. Analyzer *string `json:"analyzer,omitempty"` @@ -68,7 +69,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "prefix_length": @@ -90,7 +91,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -101,7 +102,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case "term": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Term", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Transpositions", err) } s.Transpositions = &value case bool: @@ -126,7 +127,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case "use_field": if err := dec.Decode(&s.UseField); err != nil { - return err + return fmt.Errorf("%s | %w", "UseField", err) } } diff --git a/typedapi/types/intervalsmatch.go b/typedapi/types/intervalsmatch.go index 66bba193d4..e8ba352d05 100644 --- a/typedapi/types/intervalsmatch.go +++ b/typedapi/types/intervalsmatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IntervalsMatch type. 
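The IntervalsFilter "script" case above decodes a union: the raw object is scanned token by token and, depending on which key is seen, the whole message is decoded into either an InlineScript or a StoredScriptId. The sketch below reproduces that shape of dispatch under stated assumptions: the key names ("source" selecting the inline form, "id" the stored form) and the placeholder types are illustrative, since the hunks above cut off the actual case labels.

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// Placeholder shapes standing in for the client's InlineScript / StoredScriptId.
type inlineScript struct {
	Source string `json:"source"`
}

type storedScriptID struct {
	ID string `json:"id"`
}

// decodeScript scans the raw object's tokens; the first recognised key decides
// which concrete type the whole message is decoded into. Key names here are
// assumptions of this sketch.
func decodeScript(message json.RawMessage) (interface{}, error) {
	keyDec := json.NewDecoder(bytes.NewReader(message))
	for {
		t, err := keyDec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return nil, fmt.Errorf("%s | %w", "Script", err)
		}
		switch t {
		case "source":
			o := inlineScript{}
			if err := json.Unmarshal(message, &o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Script", err)
			}
			return o, nil
		case "id":
			o := storedScriptID{}
			if err := json.Unmarshal(message, &o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Script", err)
			}
			return o, nil
		}
	}
	return nil, nil
}

func main() {
	v, _ := decodeScript([]byte(`{"id": "my-stored-script"}`))
	fmt.Printf("%T %v\n", v, v) // main.storedScriptID {my-stored-script}
}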
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L186-L216 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L186-L216 type IntervalsMatch struct { // Analyzer Analyzer used to analyze terms in the query. Analyzer *string `json:"analyzer,omitempty"` @@ -68,7 +69,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "max_gaps": @@ -90,7 +91,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxGaps", err) } s.MaxGaps = &value case float64: @@ -105,7 +106,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Ordered", err) } s.Ordered = &value case bool: @@ -115,7 +116,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case "use_field": if err := dec.Decode(&s.UseField); err != nil { - return err + return fmt.Errorf("%s | %w", "UseField", err) } } diff --git a/typedapi/types/intervalsprefix.go b/typedapi/types/intervalsprefix.go index a5fd1a648a..5c7df2fd72 100644 --- a/typedapi/types/intervalsprefix.go +++ b/typedapi/types/intervalsprefix.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IntervalsPrefix type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L218-L233 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L218-L233 type IntervalsPrefix struct { // Analyzer Analyzer used to analyze the `prefix`. 
Analyzer *string `json:"analyzer,omitempty"` @@ -61,7 +62,7 @@ func (s *IntervalsPrefix) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *IntervalsPrefix) UnmarshalJSON(data []byte) error { case "prefix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Prefix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *IntervalsPrefix) UnmarshalJSON(data []byte) error { case "use_field": if err := dec.Decode(&s.UseField); err != nil { - return err + return fmt.Errorf("%s | %w", "UseField", err) } } diff --git a/typedapi/types/intervalsquery.go b/typedapi/types/intervalsquery.go index 0a60b76759..0634a32e1d 100644 --- a/typedapi/types/intervalsquery.go +++ b/typedapi/types/intervalsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IntervalsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L235-L263 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L235-L263 type IntervalsQuery struct { // AllOf Returns matches that span a combination of other rules. 
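The Analyzer and Prefix cases above use the same idiom for optional string fields: capture the value as json.RawMessage, wrap any decode failure with the field name, then strconv.Unquote it, keeping the raw text when the value was not a quoted string. A small sketch of that helper shape, assuming a hypothetical decodeOptionalString rather than the inline generated code:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)

// decodeOptionalString captures a JSON value as raw bytes, wraps failures with
// the field name, and unquotes string values, falling back to the raw text.
func decodeOptionalString(field string, dec *json.Decoder) (*string, error) {
	var tmp json.RawMessage
	if err := dec.Decode(&tmp); err != nil {
		return nil, fmt.Errorf("%s | %w", field, err)
	}
	o := string(tmp)
	if unquoted, err := strconv.Unquote(o); err == nil {
		o = unquoted
	}
	return &o, nil
}

func main() {
	dec := json.NewDecoder(strings.NewReader(`"standard"`))
	analyzer, err := decodeOptionalString("Analyzer", dec)
	fmt.Println(*analyzer, err) // standard <nil>
}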
AllOf *IntervalsAllOf `json:"all_of,omitempty"` @@ -71,12 +72,12 @@ func (s *IntervalsQuery) UnmarshalJSON(data []byte) error { case "all_of": if err := dec.Decode(&s.AllOf); err != nil { - return err + return fmt.Errorf("%s | %w", "AllOf", err) } case "any_of": if err := dec.Decode(&s.AnyOf); err != nil { - return err + return fmt.Errorf("%s | %w", "AnyOf", err) } case "boost": @@ -86,7 +87,7 @@ func (s *IntervalsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -97,23 +98,23 @@ func (s *IntervalsQuery) UnmarshalJSON(data []byte) error { case "fuzzy": if err := dec.Decode(&s.Fuzzy); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzzy", err) } case "match": if err := dec.Decode(&s.Match); err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } case "prefix": if err := dec.Decode(&s.Prefix); err != nil { - return err + return fmt.Errorf("%s | %w", "Prefix", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *IntervalsQuery) UnmarshalJSON(data []byte) error { case "wildcard": if err := dec.Decode(&s.Wildcard); err != nil { - return err + return fmt.Errorf("%s | %w", "Wildcard", err) } } diff --git a/typedapi/types/intervalswildcard.go b/typedapi/types/intervalswildcard.go index 9bd9803d8b..f75231a4e4 100644 --- a/typedapi/types/intervalswildcard.go +++ b/typedapi/types/intervalswildcard.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IntervalsWildcard type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L265-L280 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L265-L280 type IntervalsWildcard struct { // Analyzer Analyzer used to analyze the `pattern`. // Defaults to the top-level field's analyzer. 
@@ -62,7 +63,7 @@ func (s *IntervalsWildcard) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *IntervalsWildcard) UnmarshalJSON(data []byte) error { case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,7 +86,7 @@ func (s *IntervalsWildcard) UnmarshalJSON(data []byte) error { case "use_field": if err := dec.Decode(&s.UseField); err != nil { - return err + return fmt.Errorf("%s | %w", "UseField", err) } } diff --git a/typedapi/types/invertedindex.go b/typedapi/types/invertedindex.go index 528cd4bdbb..dba054dbe8 100644 --- a/typedapi/types/invertedindex.go +++ b/typedapi/types/invertedindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // InvertedIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L65-L73 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L65-L73 type InvertedIndex struct { Offsets uint `json:"offsets"` Payloads uint `json:"payloads"` diff --git a/typedapi/types/invocation.go b/typedapi/types/invocation.go index eaa3b7dfb6..900ee05ce0 100644 --- a/typedapi/types/invocation.go +++ b/typedapi/types/invocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Invocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L138-L141 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L138-L141 type Invocation struct { SnapshotName string `json:"snapshot_name"` Time DateTime `json:"time"` @@ -52,12 +53,12 @@ func (s *Invocation) UnmarshalJSON(data []byte) error { case "snapshot_name": if err := dec.Decode(&s.SnapshotName); err != nil { - return err + return fmt.Errorf("%s | %w", "SnapshotName", err) } case "time": if err := dec.Decode(&s.Time); err != nil { - return err + return fmt.Errorf("%s | %w", "Time", err) } } diff --git a/typedapi/types/invocations.go b/typedapi/types/invocations.go index f5cd1ccf76..c21bdbe90a 100644 --- a/typedapi/types/invocations.go +++ b/typedapi/types/invocations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Invocations type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L44-L46 type Invocations struct { Total int64 `json:"total"` } @@ -57,7 +58,7 @@ func (s *Invocations) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/iostatdevice.go b/typedapi/types/iostatdevice.go index 12306b8351..015d840be9 100644 --- a/typedapi/types/iostatdevice.go +++ b/typedapi/types/iostatdevice.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IoStatDevice type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L730-L755 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L730-L755 type IoStatDevice struct { // DeviceName The Linux device name. 
DeviceName *string `json:"device_name,omitempty"` @@ -69,7 +70,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { case "device_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DeviceName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,7 +86,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Operations", err) } s.Operations = &value case float64: @@ -100,7 +101,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReadKilobytes", err) } s.ReadKilobytes = &value case float64: @@ -115,7 +116,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReadOperations", err) } s.ReadOperations = &value case float64: @@ -130,7 +131,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "WriteKilobytes", err) } s.WriteKilobytes = &value case float64: @@ -145,7 +146,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "WriteOperations", err) } s.WriteOperations = &value case float64: diff --git a/typedapi/types/iostats.go b/typedapi/types/iostats.go index e894c21ab9..fd8926602d 100644 --- a/typedapi/types/iostats.go +++ b/typedapi/types/iostats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // IoStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L718-L728 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L718-L728 type IoStats struct { // Devices Array of disk metrics for each device that is backing an Elasticsearch data // path. diff --git a/typedapi/types/ipfilter.go b/typedapi/types/ipfilter.go index 2a91446cb8..27f23c094a 100644 --- a/typedapi/types/ipfilter.go +++ b/typedapi/types/ipfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IpFilter type. 
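Several numeric fields in the hunks above (Invocations.Total, the IoStatDevice counters) accept either a JSON number or a quoted number, which is why the generated code first switches on the raw value's Go type and only parses in the string case. A simplified sketch of the same behaviour, using a hypothetical flexibleInt64 type and a plain map instead of the generated token-based decoder:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// flexibleInt64 accepts {"total": 7} as well as {"total": "42"}.
type flexibleInt64 struct {
	Total int64 `json:"total"`
}

func (s *flexibleInt64) UnmarshalJSON(data []byte) error {
	var raw map[string]interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["total"].(type) {
	case string:
		value, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return fmt.Errorf("%s | %w", "Total", err)
		}
		s.Total = value
	case float64:
		s.Total = int64(v)
	}
	return nil
}

func main() {
	var a, b flexibleInt64
	_ = json.Unmarshal([]byte(`{"total": 7}`), &a)
	_ = json.Unmarshal([]byte(`{"total": "42"}`), &b)
	fmt.Println(a.Total, b.Total) // 7 42
}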
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L167-L170 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L167-L170 type IpFilter struct { Http bool `json:"http"` Transport bool `json:"transport"` @@ -58,7 +59,7 @@ func (s *IpFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Http", err) } s.Http = value case bool: @@ -72,7 +73,7 @@ func (s *IpFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Transport", err) } s.Transport = value case bool: diff --git a/typedapi/types/ipprefixaggregate.go b/typedapi/types/ipprefixaggregate.go index c01f0e9772..97bf5c2a9f 100644 --- a/typedapi/types/ipprefixaggregate.go +++ b/typedapi/types/ipprefixaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IpPrefixAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L629-L630 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L629-L630 type IpPrefixAggregate struct { Buckets BucketsIpPrefixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *IpPrefixAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]IpPrefixBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []IpPrefixBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/ipprefixaggregation.go b/typedapi/types/ipprefixaggregation.go index ccb18abb4e..dffa5120ff 100644 --- a/typedapi/types/ipprefixaggregation.go +++ b/typedapi/types/ipprefixaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IpPrefixAggregation type. 
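IpPrefixAggregate above (and IpRangeAggregate later in this patch) decode their Buckets field as a union: Elasticsearch returns a keyed object when the aggregation is keyed and a plain array otherwise, so the generated code inspects the first byte of the raw value. A sketch of that dispatch, assuming a pared-down bucket type (the real IpPrefixBucket also carries sub-aggregations):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// bucket stands in for IpPrefixBucket; only the fields needed for the sketch.
type bucket struct {
	DocCount int64  `json:"doc_count"`
	Key      string `json:"key"`
}

// decodeBuckets returns either map[string]bucket or []bucket depending on
// whether the raw value starts with '{' or '['.
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	trimmed := bytes.TrimLeft(raw, " \t\r\n")
	if len(trimmed) == 0 {
		return nil, fmt.Errorf("Buckets | empty value")
	}
	switch trimmed[0] {
	case '{':
		o := make(map[string]bucket)
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	case '[':
		var o []bucket
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	}
	return nil, fmt.Errorf("Buckets | unexpected JSON: %s", trimmed[:1])
}

func main() {
	keyed, _ := decodeBuckets([]byte(`{"192.168.0.0/24": {"doc_count": 3, "key": "192.168.0.0"}}`))
	listed, _ := decodeBuckets([]byte(`[{"doc_count": 3, "key": "192.168.0.0"}]`))
	fmt.Printf("%T %T\n", keyed, listed) // map[string]main.bucket []main.bucket
}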
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1114-L1143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1114-L1143 type IpPrefixAggregation struct { // AppendPrefixLength Defines whether the prefix length is appended to IP address keys in the // response. @@ -75,7 +76,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AppendPrefixLength", err) } s.AppendPrefixLength = &value case bool: @@ -84,7 +85,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "is_ipv6": @@ -94,7 +95,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsIpv6", err) } s.IsIpv6 = &value case bool: @@ -108,7 +109,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -117,7 +118,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -127,7 +128,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -138,7 +139,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -155,7 +156,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = value case float64: diff --git a/typedapi/types/ipprefixbucket.go b/typedapi/types/ipprefixbucket.go index b54f75ab47..a1f4035d30 100644 --- a/typedapi/types/ipprefixbucket.go +++ b/typedapi/types/ipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // IpPrefixBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L632-L637 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L632-L637 type IpPrefixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -64,7 +64,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -79,7 +79,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsIpv6", err) } s.IsIpv6 = value case bool: @@ -89,7 +89,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -101,7 +101,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case "netmask": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Netmask", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +118,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = value case float64: @@ -140,490 +140,490 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err 
:= dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := 
NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -633,7 +633,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/ipproperty.go b/typedapi/types/ipproperty.go index 700ebc15c5..dd04c0324c 100644 --- a/typedapi/types/ipproperty.go +++ b/typedapi/types/ipproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // IpProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L59-L73 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L59-L73 type IpProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -79,7 +80,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -94,13 +95,13 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -111,7 +112,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -120,7 +121,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -438,7 +439,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -453,7 +454,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } 
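The long switch above routes each entry of an IpPrefixBucket's Aggregations map to a concrete aggregate type and stores it under elems[1]. Response keys follow Elasticsearch's typed_keys convention, type#name (for example sterms#tags): the prefix selects the decode target and the suffix becomes the map key, with a generic map fallback for unknown types and for keys without a prefix. The "#" split itself is not visible in the hunks, so it is an assumption of the reduced sketch below, as are the placeholder result shapes.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// dispatchAggregation reduces the routing above to its core: split the typed
// key, decode into a type chosen by the prefix, and key the result by the
// user-facing name.
func dispatchAggregation(key string, raw json.RawMessage) (string, interface{}, error) {
	elems := strings.SplitN(key, "#", 2)
	if len(elems) != 2 {
		// Untyped key: generic map, keyed by the full value, as in the else branch above.
		var o map[string]interface{}
		err := json.Unmarshal(raw, &o)
		return key, o, err
	}
	switch elems[0] {
	case "sterms": // stand-in for NewStringTermsAggregate()
		var o struct {
			Buckets []map[string]interface{} `json:"buckets"`
		}
		err := json.Unmarshal(raw, &o)
		return elems[1], o, err
	default: // unknown aggregate type: generic map, as in the generated default case
		var o map[string]interface{}
		err := json.Unmarshal(raw, &o)
		return elems[1], o, err
	}
}

func main() {
	name, agg, err := dispatchAggregation("sterms#tags", []byte(`{"buckets":[{"key":"go","doc_count":2}]}`))
	fmt.Println(name, err) // tags <nil>
	fmt.Printf("%+v\n", agg)
}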
s.IgnoreMalformed = &value case bool: @@ -467,7 +468,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -479,13 +480,13 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -496,7 +497,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -809,7 +810,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -818,7 +819,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -827,7 +828,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -835,7 +836,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -845,7 +846,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -861,7 +862,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -875,7 +876,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -884,7 +885,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/iprangeaggregate.go b/typedapi/types/iprangeaggregate.go index abb2034f45..96b4af02e4 100644 --- a/typedapi/types/iprangeaggregate.go +++ b/typedapi/types/iprangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // IpRangeAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L556-L558 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L556-L558 type IpRangeAggregate struct { Buckets BucketsIpRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *IpRangeAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]IpRangeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []IpRangeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/iprangeaggregation.go b/typedapi/types/iprangeaggregation.go index a63a5390e5..cb84fe27df 100644 --- a/typedapi/types/iprangeaggregation.go +++ b/typedapi/types/iprangeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IpRangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L548-L557 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L548-L557 type IpRangeAggregation struct { // Field The date field whose values are used to build ranges. Field *string `json:"field,omitempty"` @@ -57,18 +58,18 @@ func (s *IpRangeAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *IpRangeAggregation) UnmarshalJSON(data []byte) error { case "ranges": if err := dec.Decode(&s.Ranges); err != nil { - return err + return fmt.Errorf("%s | %w", "Ranges", err) } } diff --git a/typedapi/types/iprangeaggregationrange.go b/typedapi/types/iprangeaggregationrange.go index fbafda583c..a46df6529f 100644 --- a/typedapi/types/iprangeaggregationrange.go +++ b/typedapi/types/iprangeaggregationrange.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // IpRangeAggregationRange type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L559-L572 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L559-L572 type IpRangeAggregationRange struct { // From Start of the range. From string `json:"from,omitempty"` @@ -58,7 +59,7 @@ func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { case "from": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { case "mask": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Mask", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { case "to": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/iprangebucket.go b/typedapi/types/iprangebucket.go index 1ca0f6bcd7..e995d98dda 100644 --- a/typedapi/types/iprangebucket.go +++ b/typedapi/types/iprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // IpRangeBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L560-L564 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L560-L564 type IpRangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -63,7 +63,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -74,7 +74,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { case "from": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +86,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +98,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { case "to": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,490 +121,490 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { 
- return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -614,7 +614,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/iprangeproperty.go b/typedapi/types/iprangeproperty.go index 6f1d25d373..cf1e84a218 100644 --- a/typedapi/types/iprangeproperty.go +++ b/typedapi/types/iprangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // IpRangeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/range.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/range.ts#L46-L48 type IpRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -72,7 +73,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -101,13 +102,13 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -118,7 +119,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -127,7 +128,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -445,7 +446,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -460,7 +461,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return 
fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -472,7 +473,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -785,7 +786,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -801,7 +802,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -810,7 +811,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/job.go b/typedapi/types/job.go index 28c7a25a5f..8a3672101f 100644 --- a/typedapi/types/job.go +++ b/typedapi/types/job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Job type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L61-L180 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L61-L180 type Job struct { // AllowLazyOpen Advanced configuration option. 
// Specifies whether this job can open when there is insufficient machine @@ -159,7 +160,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLazyOpen", err) } s.AllowLazyOpen = value case bool: @@ -168,32 +169,32 @@ func (s *Job) UnmarshalJSON(data []byte) error { case "analysis_config": if err := dec.Decode(&s.AnalysisConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisConfig", err) } case "analysis_limits": if err := dec.Decode(&s.AnalysisLimits); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisLimits", err) } case "background_persist_interval": if err := dec.Decode(&s.BackgroundPersistInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundPersistInterval", err) } case "blocked": if err := dec.Decode(&s.Blocked); err != nil { - return err + return fmt.Errorf("%s | %w", "Blocked", err) } case "create_time": if err := dec.Decode(&s.CreateTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } case "custom_settings": if err := dec.Decode(&s.CustomSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomSettings", err) } case "daily_model_snapshot_retention_after_days": @@ -203,7 +204,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DailyModelSnapshotRetentionAfterDays", err) } s.DailyModelSnapshotRetentionAfterDays = &value case float64: @@ -213,12 +214,12 @@ func (s *Job) UnmarshalJSON(data []byte) error { case "data_description": if err := dec.Decode(&s.DataDescription); err != nil { - return err + return fmt.Errorf("%s | %w", "DataDescription", err) } case "datafeed_config": if err := dec.Decode(&s.DatafeedConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DatafeedConfig", err) } case "deleting": @@ -228,7 +229,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Deleting", err) } s.Deleting = &value case bool: @@ -238,7 +239,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -249,23 +250,23 @@ func (s *Job) UnmarshalJSON(data []byte) error { case "finished_time": if err := dec.Decode(&s.FinishedTime); err != nil { - return err + return fmt.Errorf("%s | %w", "FinishedTime", err) } case "groups": if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "job_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "JobType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -276,17 +277,17 @@ func (s *Job) UnmarshalJSON(data []byte) error { case "job_version": if err := dec.Decode(&s.JobVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "JobVersion", err) } case "model_plot_config": if err := dec.Decode(&s.ModelPlotConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPlotConfig", err) } case "model_snapshot_id": if err := 
dec.Decode(&s.ModelSnapshotId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotId", err) } case "model_snapshot_retention_days": @@ -296,7 +297,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotRetentionDays", err) } s.ModelSnapshotRetentionDays = value case float64: @@ -311,7 +312,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RenormalizationWindowDays", err) } s.RenormalizationWindowDays = &value case float64: @@ -321,7 +322,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case "results_index_name": if err := dec.Decode(&s.ResultsIndexName); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsIndexName", err) } case "results_retention_days": @@ -331,7 +332,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsRetentionDays", err) } s.ResultsRetentionDays = &value case float64: diff --git a/typedapi/types/jobblocked.go b/typedapi/types/jobblocked.go index efd825ae70..1846498c37 100644 --- a/typedapi/types/jobblocked.go +++ b/typedapi/types/jobblocked.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/jobblockedreason" @@ -31,7 +32,7 @@ import ( // JobBlocked type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L392-L395 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L392-L395 type JobBlocked struct { Reason jobblockedreason.JobBlockedReason `json:"reason"` TaskId TaskId `json:"task_id,omitempty"` @@ -54,12 +55,12 @@ func (s *JobBlocked) UnmarshalJSON(data []byte) error { case "reason": if err := dec.Decode(&s.Reason); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } case "task_id": if err := dec.Decode(&s.TaskId); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskId", err) } } diff --git a/typedapi/types/jobconfig.go b/typedapi/types/jobconfig.go index 731ae7afa3..bdc17c1731 100644 --- a/typedapi/types/jobconfig.go +++ b/typedapi/types/jobconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JobConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L182-L283 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L182-L283 type JobConfig struct { // AllowLazyOpen Advanced configuration option. Specifies whether this job can open when there // is insufficient machine learning node capacity for it to be immediately @@ -140,7 +141,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLazyOpen", err) } s.AllowLazyOpen = &value case bool: @@ -149,22 +150,22 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case "analysis_config": if err := dec.Decode(&s.AnalysisConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisConfig", err) } case "analysis_limits": if err := dec.Decode(&s.AnalysisLimits); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalysisLimits", err) } case "background_persist_interval": if err := dec.Decode(&s.BackgroundPersistInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundPersistInterval", err) } case "custom_settings": if err := dec.Decode(&s.CustomSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomSettings", err) } case "daily_model_snapshot_retention_after_days": @@ -174,7 +175,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DailyModelSnapshotRetentionAfterDays", err) } s.DailyModelSnapshotRetentionAfterDays = &value case float64: @@ -184,18 +185,18 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case "data_description": if err := dec.Decode(&s.DataDescription); err != nil { - return err + return fmt.Errorf("%s | %w", "DataDescription", err) } case "datafeed_config": if err := dec.Decode(&s.DatafeedConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DatafeedConfig", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -206,18 +207,18 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case "groups": if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "job_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "JobType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -228,7 +229,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case "model_plot_config": if err := dec.Decode(&s.ModelPlotConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPlotConfig", err) } case "model_snapshot_retention_days": @@ -238,7 +239,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSnapshotRetentionDays", err) } s.ModelSnapshotRetentionDays = &value case float64: @@ -253,7 +254,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - 
return err + return fmt.Errorf("%s | %w", "RenormalizationWindowDays", err) } s.RenormalizationWindowDays = &value case float64: @@ -263,7 +264,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case "results_index_name": if err := dec.Decode(&s.ResultsIndexName); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsIndexName", err) } case "results_retention_days": @@ -273,7 +274,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsRetentionDays", err) } s.ResultsRetentionDays = &value case float64: diff --git a/typedapi/types/jobforecaststatistics.go b/typedapi/types/jobforecaststatistics.go index 60107d7329..290197073a 100644 --- a/typedapi/types/jobforecaststatistics.go +++ b/typedapi/types/jobforecaststatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JobForecastStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L343-L350 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L343-L350 type JobForecastStatistics struct { ForecastedJobs int `json:"forecasted_jobs"` MemoryBytes *JobStatistics `json:"memory_bytes,omitempty"` @@ -63,7 +64,7 @@ func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastedJobs", err) } s.ForecastedJobs = value case float64: @@ -73,17 +74,17 @@ func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { case "memory_bytes": if err := dec.Decode(&s.MemoryBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryBytes", err) } case "processing_time_ms": if err := dec.Decode(&s.ProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTimeMs", err) } case "records": if err := dec.Decode(&s.Records); err != nil { - return err + return fmt.Errorf("%s | %w", "Records", err) } case "status": @@ -91,7 +92,7 @@ func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { s.Status = make(map[string]int64, 0) } if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "total": @@ -101,7 +102,7 @@ func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/jobsrecord.go b/typedapi/types/jobsrecord.go index 28d83fc75e..89b402c18a 100644 --- a/typedapi/types/jobsrecord.go +++ b/typedapi/types/jobsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // JobsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_jobs/types.ts#L24-L347 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_jobs/types.ts#L24-L347 type JobsRecord struct { // AssignmentExplanation For open anomaly detection jobs only, contains messages relating to the // selection of a node to run the job. @@ -243,7 +244,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "assignment_explanation", "ae": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -255,7 +256,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "buckets.count", "bc", "bucketsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -267,7 +268,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "buckets.time.exp_avg", "btea", "bucketsTimeExpAvg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsTimeExpAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -279,7 +280,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "buckets.time.exp_avg_hour", "bteah", "bucketsTimeExpAvgHour": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsTimeExpAvgHour", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -291,7 +292,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "buckets.time.max", "btmax", "bucketsTimeMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsTimeMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -303,7 +304,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "buckets.time.min", "btmin", "bucketsTimeMin": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsTimeMin", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -315,7 +316,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "buckets.time.total", "btt", "bucketsTimeTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsTimeTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -327,7 +328,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.buckets", "db", "dataBuckets": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataBuckets", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -339,7 +340,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.earliest_record", "der", "dataEarliestRecord": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", 
"DataEarliestRecord", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -351,7 +352,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.empty_buckets", "deb", "dataEmptyBuckets": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataEmptyBuckets", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -362,13 +363,13 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.input_bytes", "dib", "dataInputBytes": if err := dec.Decode(&s.DataInputBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "DataInputBytes", err) } case "data.input_fields", "dif", "dataInputFields": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataInputFields", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -380,7 +381,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.input_records", "dir", "dataInputRecords": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataInputRecords", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -392,7 +393,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.invalid_dates", "did", "dataInvalidDates": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataInvalidDates", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -404,7 +405,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.last", "dl", "dataLast": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataLast", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -416,7 +417,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.last_empty_bucket", "dleb", "dataLastEmptyBucket": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataLastEmptyBucket", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -428,7 +429,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.last_sparse_bucket", "dlsb", "dataLastSparseBucket": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataLastSparseBucket", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -440,7 +441,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.latest_record", "dlr", "dataLatestRecord": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataLatestRecord", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -452,7 +453,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.missing_fields", "dmf", "dataMissingFields": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataMissingFields", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -464,7 +465,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.out_of_order_timestamps", "doot", "dataOutOfOrderTimestamps": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataOutOfOrderTimestamps", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -476,7 +477,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.processed_fields", "dpf", "dataProcessedFields": 
var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataProcessedFields", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -488,7 +489,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.processed_records", "dpr", "dataProcessedRecords": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataProcessedRecords", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -500,7 +501,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "data.sparse_buckets", "dsb", "dataSparseBuckets": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataSparseBuckets", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -512,7 +513,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.memory.avg", "fmavg", "forecastsMemoryAvg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsMemoryAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -524,7 +525,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.memory.max", "fmmax", "forecastsMemoryMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsMemoryMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -536,7 +537,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.memory.min", "fmmin", "forecastsMemoryMin": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsMemoryMin", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -548,7 +549,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.memory.total", "fmt", "forecastsMemoryTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsMemoryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -560,7 +561,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.records.avg", "fravg", "forecastsRecordsAvg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsRecordsAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -572,7 +573,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.records.max", "frmax", "forecastsRecordsMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsRecordsMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -584,7 +585,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.records.min", "frmin", "forecastsRecordsMin": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsRecordsMin", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -596,7 +597,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.records.total", "frt", "forecastsRecordsTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsRecordsTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -608,7 +609,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.time.avg", "ftavg", 
"forecastsTimeAvg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsTimeAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -620,7 +621,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.time.max", "ftmax", "forecastsTimeMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsTimeMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -632,7 +633,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.time.min", "ftmin", "forecastsTimeMin": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsTimeMin", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -644,7 +645,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.time.total", "ftt", "forecastsTimeTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsTimeTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -656,7 +657,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "forecasts.total", "ft", "forecastsTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -667,13 +668,13 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "model.bucket_allocation_failures", "mbaf", "modelBucketAllocationFailures": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBucketAllocationFailures", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -685,7 +686,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.by_fields", "mbf", "modelByFields": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelByFields", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -696,23 +697,23 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.bytes", "mb", "modelBytes": if err := dec.Decode(&s.ModelBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBytes", err) } case "model.bytes_exceeded", "mbe", "modelBytesExceeded": if err := dec.Decode(&s.ModelBytesExceeded); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBytesExceeded", err) } case "model.categorization_status", "mcs", "modelCategorizationStatus": if err := dec.Decode(&s.ModelCategorizationStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelCategorizationStatus", err) } case "model.categorized_doc_count", "mcdc", "modelCategorizedDocCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelCategorizedDocCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -724,7 +725,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.dead_category_count", "mdcc", "modelDeadCategoryCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelDeadCategoryCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -736,7 +737,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case 
"model.failed_category_count", "mfcc", "modelFailedCategoryCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelFailedCategoryCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -748,7 +749,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.frequent_category_count", "modelFrequentCategoryCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelFrequentCategoryCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -760,7 +761,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.log_time", "mlt", "modelLogTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelLogTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -772,7 +773,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.memory_limit", "mml", "modelMemoryLimit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -783,13 +784,13 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.memory_status", "mms", "modelMemoryStatus": if err := dec.Decode(&s.ModelMemoryStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelMemoryStatus", err) } case "model.over_fields", "mof", "modelOverFields": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelOverFields", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -801,7 +802,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.partition_fields", "mpf", "modelPartitionFields": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelPartitionFields", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -813,7 +814,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.rare_category_count", "mrcc", "modelRareCategoryCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelRareCategoryCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -825,7 +826,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.timestamp", "mt", "modelTimestamp": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelTimestamp", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -837,7 +838,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "model.total_category_count", "mtcc", "modelTotalCategoryCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelTotalCategoryCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -849,7 +850,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "node.address", "na", "nodeAddress": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -860,18 +861,18 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "node.ephemeral_id", "ne", "nodeEphemeralId": if err := dec.Decode(&s.NodeEphemeralId); err != nil { - return err + return fmt.Errorf("%s | %w", 
"NodeEphemeralId", err) } case "node.id", "ni", "nodeId": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "node.name", "nn", "nodeName": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -883,7 +884,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "opened_time", "ot": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "OpenedTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -894,7 +895,7 @@ func (s *JobsRecord) UnmarshalJSON(data []byte) error { case "state", "s": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } } diff --git a/typedapi/types/jobstatistics.go b/typedapi/types/jobstatistics.go index ce9c203c72..452f4a3c42 100644 --- a/typedapi/types/jobstatistics.go +++ b/typedapi/types/jobstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JobStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L54-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L54-L59 type JobStatistics struct { Avg Float64 `json:"avg"` Max Float64 `json:"max"` @@ -60,7 +61,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } f := Float64(value) s.Avg = f @@ -76,7 +77,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) s.Max = f @@ -92,7 +93,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) s.Min = f @@ -108,7 +109,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } f := Float64(value) s.Total = f diff --git a/typedapi/types/jobstats.go b/typedapi/types/jobstats.go index 593570ce1b..7defb2802e 100644 --- a/typedapi/types/jobstats.go +++ b/typedapi/types/jobstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // JobStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L284-L330 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L284-L330 type JobStats struct { // AssignmentExplanation For open anomaly detection jobs only, contains messages relating to the // selection of a node to run the job. @@ -85,7 +86,7 @@ func (s *JobStats) UnmarshalJSON(data []byte) error { case "assignment_explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +97,7 @@ func (s *JobStats) UnmarshalJSON(data []byte) error { case "data_counts": if err := dec.Decode(&s.DataCounts); err != nil { - return err + return fmt.Errorf("%s | %w", "DataCounts", err) } case "deleting": @@ -106,7 +107,7 @@ func (s *JobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Deleting", err) } s.Deleting = &value case bool: @@ -115,13 +116,13 @@ func (s *JobStats) UnmarshalJSON(data []byte) error { case "forecasts_stats": if err := dec.Decode(&s.ForecastsStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastsStats", err) } case "job_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,27 +133,27 @@ func (s *JobStats) UnmarshalJSON(data []byte) error { case "model_size_stats": if err := dec.Decode(&s.ModelSizeStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSizeStats", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "open_time": if err := dec.Decode(&s.OpenTime); err != nil { - return err + return fmt.Errorf("%s | %w", "OpenTime", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "timing_stats": if err := dec.Decode(&s.TimingStats); err != nil { - return err + return fmt.Errorf("%s | %w", "TimingStats", err) } } diff --git a/typedapi/types/jobtimingstats.go b/typedapi/types/jobtimingstats.go index 4ad4e19399..cc707df4fd 100644 --- a/typedapi/types/jobtimingstats.go +++ b/typedapi/types/jobtimingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JobTimingStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Job.ts#L332-L341 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Job.ts#L332-L341 type JobTimingStats struct { AverageBucketProcessingTimeMs Float64 `json:"average_bucket_processing_time_ms,omitempty"` BucketCount int64 `json:"bucket_count"` @@ -59,7 +60,7 @@ func (s *JobTimingStats) UnmarshalJSON(data []byte) error { case "average_bucket_processing_time_ms": if err := dec.Decode(&s.AverageBucketProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "AverageBucketProcessingTimeMs", err) } case "bucket_count": @@ -69,7 +70,7 @@ func (s *JobTimingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BucketCount", err) } s.BucketCount = value case float64: @@ -79,32 +80,32 @@ func (s *JobTimingStats) UnmarshalJSON(data []byte) error { case "exponential_average_bucket_processing_time_ms": if err := dec.Decode(&s.ExponentialAverageBucketProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ExponentialAverageBucketProcessingTimeMs", err) } case "exponential_average_bucket_processing_time_per_hour_ms": if err := dec.Decode(&s.ExponentialAverageBucketProcessingTimePerHourMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ExponentialAverageBucketProcessingTimePerHourMs", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "maximum_bucket_processing_time_ms": if err := dec.Decode(&s.MaximumBucketProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "MaximumBucketProcessingTimeMs", err) } case "minimum_bucket_processing_time_ms": if err := dec.Decode(&s.MinimumBucketProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumBucketProcessingTimeMs", err) } case "total_bucket_processing_time_ms": if err := dec.Decode(&s.TotalBucketProcessingTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalBucketProcessingTimeMs", err) } } diff --git a/typedapi/types/jobusage.go b/typedapi/types/jobusage.go index cb410e39b7..df24dca67d 100644 --- a/typedapi/types/jobusage.go +++ b/typedapi/types/jobusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JobUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L364-L370 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L364-L370 type JobUsage struct { Count int `json:"count"` CreatedBy map[string]int64 `json:"created_by"` @@ -62,7 +63,7 @@ func (s *JobUsage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -75,22 +76,22 @@ func (s *JobUsage) UnmarshalJSON(data []byte) error { s.CreatedBy = make(map[string]int64, 0) } if err := dec.Decode(&s.CreatedBy); err != nil { - return err + return fmt.Errorf("%s | %w", "CreatedBy", err) } case "detectors": if err := dec.Decode(&s.Detectors); err != nil { - return err + return fmt.Errorf("%s | %w", "Detectors", err) } case "forecasts": if err := dec.Decode(&s.Forecasts); err != nil { - return err + return fmt.Errorf("%s | %w", "Forecasts", err) } case "model_size": if err := dec.Decode(&s.ModelSize); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSize", err) } } diff --git a/typedapi/types/joinprocessor.go b/typedapi/types/joinprocessor.go index b21c177d51..8d94d9ea54 100644 --- a/typedapi/types/joinprocessor.go +++ b/typedapi/types/joinprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JoinProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L801-L816 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L801-L816 type JoinProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -71,7 +72,7 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,13 +83,13 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -113,13 +114,13 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -142,7 +143,7 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/joinproperty.go b/typedapi/types/joinproperty.go index 1f92367aab..cf6d01d72e 100644 --- a/typedapi/types/joinproperty.go +++ b/typedapi/types/joinproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // JoinProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L83-L87 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L83-L87 type JoinProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` @@ -62,7 +63,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "eager_global_ordinals": @@ -72,7 +73,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EagerGlobalOrdinals", err) } s.EagerGlobalOrdinals = &value case bool: @@ -394,7 +395,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -407,7 +408,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -729,14 +730,14 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { o := new(string) err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "Relations", err) } s.Relations[key] = append(s.Relations[key], *o) default: o := []string{} err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "Relations", err) } s.Relations[key] = o } @@ -744,7 +745,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/jsonprocessor.go b/typedapi/types/jsonprocessor.go index f33dc50dfe..f0f28f383c 100644 --- a/typedapi/types/jsonprocessor.go +++ b/typedapi/types/jsonprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // JsonProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L818-L847 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L818-L847 type JsonProcessor struct { // AddToRoot Flag that forces the parsed JSON to be added at the top level of the // document. 
@@ -88,7 +89,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AddToRoot", err) } s.AddToRoot = &value case bool: @@ -97,7 +98,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case "add_to_root_conflict_strategy": if err := dec.Decode(&s.AddToRootConflictStrategy); err != nil { - return err + return fmt.Errorf("%s | %w", "AddToRootConflictStrategy", err) } case "allow_duplicate_keys": @@ -107,7 +108,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowDuplicateKeys", err) } s.AllowDuplicateKeys = &value case bool: @@ -117,7 +118,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,13 +129,13 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -150,7 +151,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -159,13 +160,13 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -176,7 +177,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/jvm.go b/typedapi/types/jvm.go index b4ada961ef..ed4c17db93 100644 --- a/typedapi/types/jvm.go +++ b/typedapi/types/jvm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Jvm type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L811-L845 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L811-L845 type Jvm struct { // BufferPools Contains statistics about JVM buffer pools for the node. 
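The generated UnmarshalJSON methods touched by this patch share one shape: a token loop over the JSON keys, a per-field switch, and now a wrapped error that names the field. A simplified stand-in is sketched below; the struct and its two fields are hypothetical, loosely modeled on the JobTimingStats hunks above.

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// timingSketch is a hypothetical two-field stand-in for the generated structs.
type timingSketch struct {
	JobId       string `json:"job_id"`
	BucketCount int64  `json:"bucket_count"`
}

func (s *timingSketch) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token() // object delimiters and keys arrive as tokens
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "job_id":
			if err := dec.Decode(&s.JobId); err != nil {
				return fmt.Errorf("%s | %w", "JobId", err)
			}
		case "bucket_count":
			if err := dec.Decode(&s.BucketCount); err != nil {
				return fmt.Errorf("%s | %w", "BucketCount", err)
			}
		}
	}
	return nil
}

func main() {
	var s timingSketch
	err := json.Unmarshal([]byte(`{"job_id":"j1","bucket_count":"oops"}`), &s)
	fmt.Println(err) // prints something like: BucketCount | json: cannot unmarshal string into Go value of type int64
}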
BufferPools map[string]NodeBufferPool `json:"buffer_pools,omitempty"` @@ -71,27 +72,27 @@ func (s *Jvm) UnmarshalJSON(data []byte) error { s.BufferPools = make(map[string]NodeBufferPool, 0) } if err := dec.Decode(&s.BufferPools); err != nil { - return err + return fmt.Errorf("%s | %w", "BufferPools", err) } case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } case "gc": if err := dec.Decode(&s.Gc); err != nil { - return err + return fmt.Errorf("%s | %w", "Gc", err) } case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "threads": if err := dec.Decode(&s.Threads); err != nil { - return err + return fmt.Errorf("%s | %w", "Threads", err) } case "timestamp": @@ -101,7 +102,7 @@ func (s *Jvm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = &value case float64: @@ -112,7 +113,7 @@ func (s *Jvm) UnmarshalJSON(data []byte) error { case "uptime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Uptime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *Jvm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UptimeInMillis", err) } s.UptimeInMillis = &value case float64: diff --git a/typedapi/types/jvmclasses.go b/typedapi/types/jvmclasses.go index 5dd339fec8..fd3df8141c 100644 --- a/typedapi/types/jvmclasses.go +++ b/typedapi/types/jvmclasses.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JvmClasses type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L908-L921 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L908-L921 type JvmClasses struct { // CurrentLoadedCount Number of classes currently loaded by JVM. 
CurrentLoadedCount *int64 `json:"current_loaded_count,omitempty"` @@ -62,7 +63,7 @@ func (s *JvmClasses) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentLoadedCount", err) } s.CurrentLoadedCount = &value case float64: @@ -77,7 +78,7 @@ func (s *JvmClasses) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalLoadedCount", err) } s.TotalLoadedCount = &value case float64: @@ -92,7 +93,7 @@ func (s *JvmClasses) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalUnloadedCount", err) } s.TotalUnloadedCount = &value case float64: diff --git a/typedapi/types/jvmmemorystats.go b/typedapi/types/jvmmemorystats.go index cf60bf84b5..8412240e51 100644 --- a/typedapi/types/jvmmemorystats.go +++ b/typedapi/types/jvmmemorystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JvmMemoryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L847-L876 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L847-L876 type JvmMemoryStats struct { // HeapCommittedInBytes Amount of memory, in bytes, available for use by the heap. 
HeapCommittedInBytes *int64 `json:"heap_committed_in_bytes,omitempty"` @@ -70,7 +71,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapCommittedInBytes", err) } s.HeapCommittedInBytes = &value case float64: @@ -85,7 +86,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMaxInBytes", err) } s.HeapMaxInBytes = &value case float64: @@ -100,7 +101,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapUsedInBytes", err) } s.HeapUsedInBytes = &value case float64: @@ -115,7 +116,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapUsedPercent", err) } s.HeapUsedPercent = &value case float64: @@ -130,7 +131,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NonHeapCommittedInBytes", err) } s.NonHeapCommittedInBytes = &value case float64: @@ -145,7 +146,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NonHeapUsedInBytes", err) } s.NonHeapUsedInBytes = &value case float64: @@ -158,7 +159,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { s.Pools = make(map[string]Pool, 0) } if err := dec.Decode(&s.Pools); err != nil { - return err + return fmt.Errorf("%s | %w", "Pools", err) } } diff --git a/typedapi/types/jvmstats.go b/typedapi/types/jvmstats.go index ba33cb6214..4843b26573 100644 --- a/typedapi/types/jvmstats.go +++ b/typedapi/types/jvmstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JvmStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_memory_stats/types.ts#L50-L63 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_memory_stats/types.ts#L50-L63 type JvmStats struct { // HeapMax Maximum amount of memory available for use by the heap. 
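Several of the stats fields in the surrounding hunks (heap_committed_in_bytes, heap_max_in_bytes, and so on) accept either a JSON number or a quoted number, which is why each case parses with strconv and now wraps the parse error. A hedged sketch of that handling follows; the helper name is invented for illustration.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeInt64 approximates the string-or-number handling seen in these hunks.
func decodeInt64(raw json.RawMessage, field string) (int64, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return 0, fmt.Errorf("%s | %w", field, err)
	}
	switch v := v.(type) {
	case string: // e.g. "1024" sent as a quoted number
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", field, err)
		}
		return n, nil
	case float64: // plain JSON number
		return int64(v), nil
	default:
		return 0, fmt.Errorf("%s | unexpected type %T", field, v)
	}
}

func main() {
	n, _ := decodeInt64(json.RawMessage(`"1024"`), "HeapMaxInBytes")
	fmt.Println(n) // 1024
}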
HeapMax ByteSize `json:"heap_max,omitempty"` @@ -65,7 +66,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case "heap_max": if err := dec.Decode(&s.HeapMax); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMax", err) } case "heap_max_in_bytes": @@ -76,7 +77,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMaxInBytes", err) } s.HeapMaxInBytes = value case float64: @@ -86,7 +87,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case "java_inference": if err := dec.Decode(&s.JavaInference); err != nil { - return err + return fmt.Errorf("%s | %w", "JavaInference", err) } case "java_inference_in_bytes": @@ -97,7 +98,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "JavaInferenceInBytes", err) } s.JavaInferenceInBytes = value case float64: @@ -107,7 +108,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case "java_inference_max": if err := dec.Decode(&s.JavaInferenceMax); err != nil { - return err + return fmt.Errorf("%s | %w", "JavaInferenceMax", err) } case "java_inference_max_in_bytes": @@ -118,7 +119,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "JavaInferenceMaxInBytes", err) } s.JavaInferenceMaxInBytes = value case float64: diff --git a/typedapi/types/jvmthreads.go b/typedapi/types/jvmthreads.go index 3bb8c29995..ec5c968461 100644 --- a/typedapi/types/jvmthreads.go +++ b/typedapi/types/jvmthreads.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // JvmThreads type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L897-L906 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L897-L906 type JvmThreads struct { // Count Number of active threads in use by JVM. Count *int64 `json:"count,omitempty"` @@ -60,7 +61,7 @@ func (s *JvmThreads) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -75,7 +76,7 @@ func (s *JvmThreads) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PeakCount", err) } s.PeakCount = &value case float64: diff --git a/typedapi/types/keeptypestokenfilter.go b/typedapi/types/keeptypestokenfilter.go index 2658fe4b0d..ac10d75180 100644 --- a/typedapi/types/keeptypestokenfilter.go +++ b/typedapi/types/keeptypestokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/keeptypesmode" @@ -31,7 +32,7 @@ import ( // KeepTypesTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L218-L222 type KeepTypesTokenFilter struct { Mode *keeptypesmode.KeepTypesMode `json:"mode,omitempty"` Type string `json:"type,omitempty"` @@ -56,22 +57,22 @@ func (s *KeepTypesTokenFilter) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "types": if err := dec.Decode(&s.Types); err != nil { - return err + return fmt.Errorf("%s | %w", "Types", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/keepwordstokenfilter.go b/typedapi/types/keepwordstokenfilter.go index 58d77c6c3d..76b65e313e 100644 --- a/typedapi/types/keepwordstokenfilter.go +++ b/typedapi/types/keepwordstokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KeepWordsTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L224-L229 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L224-L229 type KeepWordsTokenFilter struct { KeepWords []string `json:"keep_words,omitempty"` KeepWordsCase *bool `json:"keep_words_case,omitempty"` @@ -56,7 +57,7 @@ func (s *KeepWordsTokenFilter) UnmarshalJSON(data []byte) error { case "keep_words": if err := dec.Decode(&s.KeepWords); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepWords", err) } case "keep_words_case": @@ -66,7 +67,7 @@ func (s *KeepWordsTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "KeepWordsCase", err) } s.KeepWordsCase = &value case bool: @@ -76,7 +77,7 @@ func (s *KeepWordsTokenFilter) UnmarshalJSON(data []byte) error { case "keep_words_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepWordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,12 +88,12 @@ func (s *KeepWordsTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/keyedpercentiles.go b/typedapi/types/keyedpercentiles.go index 5de73bc917..e8259863a1 100644 --- a/typedapi/types/keyedpercentiles.go +++ b/typedapi/types/keyedpercentiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -34,7 +34,7 @@ import ( // KeyedPercentiles type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L158-L158 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L158-L158 type KeyedPercentiles map[string]string func (s KeyedPercentiles) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/keyedprocessor.go b/typedapi/types/keyedprocessor.go index 3b93900377..b7b4518018 100644 --- a/typedapi/types/keyedprocessor.go +++ b/typedapi/types/keyedprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KeyedProcessor type. 
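Boolean options such as keep_words_case and ignore_failure in the surrounding hunks tolerate both a JSON bool and a quoted bool, with strconv.ParseBool failures wrapped like everything else. A small sketch under that assumption (the helper name is invented):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeFlexBool mirrors, in simplified form, the boolean handling in these
// hunks: a field may arrive as true or as "true".
func decodeFlexBool(raw json.RawMessage, field string) (bool, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return false, fmt.Errorf("%s | %w", field, err)
	}
	switch v := v.(type) {
	case string:
		b, err := strconv.ParseBool(v)
		if err != nil {
			return false, fmt.Errorf("%s | %w", field, err)
		}
		return b, nil
	case bool:
		return v, nil
	default:
		return false, fmt.Errorf("%s | unexpected type %T", field, v)
	}
}

func main() {
	b1, _ := decodeFlexBool(json.RawMessage(`"true"`), "KeepWordsCase")
	b2, _ := decodeFlexBool(json.RawMessage(`false`), "KeepWordsCase")
	fmt.Println(b1, b2) // true false
}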
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L379-L382 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L379-L382 type KeyedProcessor struct { Stats *Processor `json:"stats,omitempty"` Type *string `json:"type,omitempty"` @@ -53,13 +54,13 @@ func (s *KeyedProcessor) UnmarshalJSON(data []byte) error { case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/keyvalueprocessor.go b/typedapi/types/keyvalueprocessor.go index fcd7f7c703..2b66d46bb1 100644 --- a/typedapi/types/keyvalueprocessor.go +++ b/typedapi/types/keyvalueprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KeyValueProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L856-L908 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L856-L908 type KeyValueProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -93,7 +94,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,18 +105,18 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "exclude_keys": if err := dec.Decode(&s.ExcludeKeys); err != nil { - return err + return fmt.Errorf("%s | %w", "ExcludeKeys", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "field_split": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldSplit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,7 +128,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -143,7 +144,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -157,7 +158,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -166,18 +167,18 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "include_keys": if err := dec.Decode(&s.IncludeKeys); err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeKeys", err) } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "prefix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Prefix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -193,7 +194,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StripBrackets", err) } s.StripBrackets = &value case bool: @@ -203,7 +204,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -214,13 +215,13 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } case "trim_key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TrimKey", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -232,7 +233,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "trim_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TrimValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -244,7 +245,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { case "value_split": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", 
"ValueSplit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/keywordanalyzer.go b/typedapi/types/keywordanalyzer.go index 851254c77c..aded8eb131 100644 --- a/typedapi/types/keywordanalyzer.go +++ b/typedapi/types/keywordanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // KeywordAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L47-L50 type KeywordAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *KeywordAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/keywordmarkertokenfilter.go b/typedapi/types/keywordmarkertokenfilter.go index f745309c65..8af6106a82 100644 --- a/typedapi/types/keywordmarkertokenfilter.go +++ b/typedapi/types/keywordmarkertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KeywordMarkerTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L231-L237 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L231-L237 type KeywordMarkerTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` Keywords []string `json:"keywords,omitempty"` @@ -62,7 +63,7 @@ func (s *KeywordMarkerTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreCase", err) } s.IgnoreCase = &value case bool: @@ -71,13 +72,13 @@ func (s *KeywordMarkerTokenFilter) UnmarshalJSON(data []byte) error { case "keywords": if err := dec.Decode(&s.Keywords); err != nil { - return err + return fmt.Errorf("%s | %w", "Keywords", err) } case "keywords_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeywordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,7 +90,7 @@ func (s *KeywordMarkerTokenFilter) UnmarshalJSON(data []byte) error { case "keywords_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeywordsPattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -100,12 +101,12 @@ func (s *KeywordMarkerTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/keywordproperty.go b/typedapi/types/keywordproperty.go index 1da687d3d9..e5120704e5 100644 --- a/typedapi/types/keywordproperty.go +++ b/typedapi/types/keywordproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // KeywordProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L89-L105 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L89-L105 type KeywordProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -81,7 +82,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -96,13 +97,13 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -113,7 +114,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -122,7 +123,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "eager_global_ordinals": @@ -132,7 +133,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EagerGlobalOrdinals", err) } s.EagerGlobalOrdinals = &value case bool: @@ -454,7 +455,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -469,7 +470,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -478,7 +479,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case "index_options": if err := dec.Decode(&s.IndexOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexOptions", err) } case "meta": @@ -486,13 +487,13 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "normalizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Normalizer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -508,7 +509,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Norms", err) } s.Norms = &value case bool: @@ -518,7 +519,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case "null_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -837,7 +838,7 @@ func (s *KeywordProperty) 
UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -853,7 +854,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitQueriesOnWhitespace", err) } s.SplitQueriesOnWhitespace = &value case bool: @@ -867,7 +868,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -881,7 +882,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -890,7 +891,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/keywordtokenizer.go b/typedapi/types/keywordtokenizer.go index 3bb3c0e4e5..5db0e44365 100644 --- a/typedapi/types/keywordtokenizer.go +++ b/typedapi/types/keywordtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KeywordTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L62-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L62-L65 type KeywordTokenizer struct { BufferSize int `json:"buffer_size"` Type string `json:"type,omitempty"` @@ -60,7 +61,7 @@ func (s *KeywordTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BufferSize", err) } s.BufferSize = value case float64: @@ -70,12 +71,12 @@ func (s *KeywordTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/kibanatoken.go b/typedapi/types/kibanatoken.go index 653586c873..26b69a34d9 100644 --- a/typedapi/types/kibanatoken.go +++ b/typedapi/types/kibanatoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KibanaToken type. 
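Fields such as copy_to in the KeywordProperty hunks above and filter in the KnnQuery hunks below accept either a single value or an array; the decoder peeks at the raw message and promotes a scalar to a one-element slice, wrapping any failure with the field name. A simplified sketch (helper name invented):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// decodeStringOrSlice approximates the single-value-or-array handling.
func decodeStringOrSlice(raw json.RawMessage, field string) ([]string, error) {
	if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
		var one string
		if err := json.Unmarshal(raw, &one); err != nil {
			return nil, fmt.Errorf("%s | %w", field, err)
		}
		return []string{one}, nil
	}
	var many []string
	if err := json.Unmarshal(raw, &many); err != nil {
		return nil, fmt.Errorf("%s | %w", field, err)
	}
	return many, nil
}

func main() {
	a, _ := decodeStringOrSlice(json.RawMessage(`"title"`), "CopyTo")
	b, _ := decodeStringOrSlice(json.RawMessage(`["a","b"]`), "CopyTo")
	fmt.Println(a, b) // [title] [a b]
}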
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/enroll_kibana/Response.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/enroll_kibana/Response.ts#L27-L30 type KibanaToken struct { Name string `json:"name"` Value string `json:"value"` @@ -54,7 +55,7 @@ func (s *KibanaToken) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *KibanaToken) UnmarshalJSON(data []byte) error { case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/knnquery.go b/typedapi/types/knnquery.go index 95a60e4b44..b9b57f595b 100644 --- a/typedapi/types/knnquery.go +++ b/typedapi/types/knnquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KnnQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Knn.ts#L27-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Knn.ts#L27-L49 type KnnQuery struct { // Boost Boost value to apply to kNN scores Boost *float32 `json:"boost,omitempty"` @@ -75,7 +76,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -86,7 +87,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "filter": @@ -95,19 +96,19 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } s.Filter = append(s.Filter, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } } case "inner_hits": if err := dec.Decode(&s.InnerHits); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } case "k": @@ -117,7 +118,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = value case float64: @@ -132,7 +133,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumCandidates", err) } s.NumCandidates = value case float64: @@ -142,12 +143,12 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { case "query_vector": if err := 
dec.Decode(&s.QueryVector); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryVector", err) } case "query_vector_builder": if err := dec.Decode(&s.QueryVectorBuilder); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryVectorBuilder", err) } case "similarity": @@ -157,7 +158,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } f := float32(value) s.Similarity = &f diff --git a/typedapi/types/kstemtokenfilter.go b/typedapi/types/kstemtokenfilter.go index 273d77da78..03fc300031 100644 --- a/typedapi/types/kstemtokenfilter.go +++ b/typedapi/types/kstemtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // KStemTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L239-L241 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L239-L241 type KStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *KStemTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/kuromojianalyzer.go b/typedapi/types/kuromojianalyzer.go index 2deb7c3836..99cc060911 100644 --- a/typedapi/types/kuromojianalyzer.go +++ b/typedapi/types/kuromojianalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // KuromojiAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L25-L29 type KuromojiAnalyzer struct { Mode kuromojitokenizationmode.KuromojiTokenizationMode `json:"mode"` Type string `json:"type,omitempty"` @@ -56,18 +57,18 @@ func (s *KuromojiAnalyzer) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "user_dictionary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UserDictionary", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/kuromojiiterationmarkcharfilter.go b/typedapi/types/kuromojiiterationmarkcharfilter.go index 7c61b2ba5f..ea5c9f2571 100644 --- a/typedapi/types/kuromojiiterationmarkcharfilter.go +++ b/typedapi/types/kuromojiiterationmarkcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KuromojiIterationMarkCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L31-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L31-L35 type KuromojiIterationMarkCharFilter struct { NormalizeKana bool `json:"normalize_kana"` NormalizeKanji bool `json:"normalize_kanji"` @@ -60,7 +61,7 @@ func (s *KuromojiIterationMarkCharFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NormalizeKana", err) } s.NormalizeKana = value case bool: @@ -74,7 +75,7 @@ func (s *KuromojiIterationMarkCharFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NormalizeKanji", err) } s.NormalizeKanji = value case bool: @@ -83,12 +84,12 @@ func (s *KuromojiIterationMarkCharFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/kuromojipartofspeechtokenfilter.go b/typedapi/types/kuromojipartofspeechtokenfilter.go index 47f0774863..a315d138a5 100644 --- a/typedapi/types/kuromojipartofspeechtokenfilter.go +++ b/typedapi/types/kuromojipartofspeechtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // KuromojiPartOfSpeechTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L37-L40 type KuromojiPartOfSpeechTokenFilter struct { Stoptags []string `json:"stoptags"` Type string `json:"type,omitempty"` @@ -53,17 +54,17 @@ func (s *KuromojiPartOfSpeechTokenFilter) UnmarshalJSON(data []byte) error { case "stoptags": if err := dec.Decode(&s.Stoptags); err != nil { - return err + return fmt.Errorf("%s | %w", "Stoptags", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/kuromojireadingformtokenfilter.go b/typedapi/types/kuromojireadingformtokenfilter.go index 1b27174b54..02894ec37f 100644 --- a/typedapi/types/kuromojireadingformtokenfilter.go +++ b/typedapi/types/kuromojireadingformtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KuromojiReadingFormTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L42-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L42-L45 type KuromojiReadingFormTokenFilter struct { Type string `json:"type,omitempty"` UseRomaji bool `json:"use_romaji"` @@ -54,7 +55,7 @@ func (s *KuromojiReadingFormTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "use_romaji": @@ -64,7 +65,7 @@ func (s *KuromojiReadingFormTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UseRomaji", err) } s.UseRomaji = value case bool: @@ -73,7 +74,7 @@ func (s *KuromojiReadingFormTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/kuromojistemmertokenfilter.go b/typedapi/types/kuromojistemmertokenfilter.go index 3361f68d50..9d86c8b18f 100644 --- a/typedapi/types/kuromojistemmertokenfilter.go +++ b/typedapi/types/kuromojistemmertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // KuromojiStemmerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L47-L50 type KuromojiStemmerTokenFilter struct { MinimumLength int `json:"minimum_length"` Type string `json:"type,omitempty"` @@ -60,7 +61,7 @@ func (s *KuromojiStemmerTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumLength", err) } s.MinimumLength = value case float64: @@ -70,12 +71,12 @@ func (s *KuromojiStemmerTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/kuromojitokenizer.go b/typedapi/types/kuromojitokenizer.go index 679c94c336..811b187ac9 100644 --- a/typedapi/types/kuromojitokenizer.go +++ b/typedapi/types/kuromojitokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // KuromojiTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/kuromoji-plugin.ts#L58-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/kuromoji-plugin.ts#L58-L67 type KuromojiTokenizer struct { DiscardCompoundToken *bool `json:"discard_compound_token,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` @@ -67,7 +68,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiscardCompoundToken", err) } s.DiscardCompoundToken = &value case bool: @@ -81,7 +82,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiscardPunctuation", err) } s.DiscardPunctuation = &value case bool: @@ -90,7 +91,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "nbest_cost": @@ -101,7 +102,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NbestCost", err) } s.NbestCost = &value case float64: @@ -112,7 +113,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case "nbest_examples": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NbestExamples", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -123,13 +124,13 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "user_dictionary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UserDictionary", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -140,12 +141,12 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case "user_dictionary_rules": if err := dec.Decode(&s.UserDictionaryRules); err != nil { - return err + return fmt.Errorf("%s | %w", "UserDictionaryRules", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/languageanalyzer.go b/typedapi/types/languageanalyzer.go index 03374d5c41..e16ee536da 100644 --- a/typedapi/types/languageanalyzer.go +++ b/typedapi/types/languageanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // LanguageAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L52-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L52-L59 type LanguageAnalyzer struct { Language language.Language `json:"language"` StemExclusion []string `json:"stem_exclusion"` @@ -59,12 +60,12 @@ func (s *LanguageAnalyzer) UnmarshalJSON(data []byte) error { case "language": if err := dec.Decode(&s.Language); err != nil { - return err + return fmt.Errorf("%s | %w", "Language", err) } case "stem_exclusion": if err := dec.Decode(&s.StemExclusion); err != nil { - return err + return fmt.Errorf("%s | %w", "StemExclusion", err) } case "stopwords": @@ -73,20 +74,20 @@ func (s *LanguageAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "stopwords_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StopwordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,12 +98,12 @@ func (s *LanguageAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/languagecontext.go b/typedapi/types/languagecontext.go index 046d0ad2a7..1ac49e4861 100644 --- a/typedapi/types/languagecontext.go +++ b/typedapi/types/languagecontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // LanguageContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/get_script_languages/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/get_script_languages/types.ts#L22-L25 type LanguageContext struct { Contexts []string `json:"contexts"` Language scriptlanguage.ScriptLanguage `json:"language"` diff --git a/typedapi/types/laplacesmoothingmodel.go b/typedapi/types/laplacesmoothingmodel.go index 929c4aa0c2..61bbbb457b 100644 --- a/typedapi/types/laplacesmoothingmodel.go +++ b/typedapi/types/laplacesmoothingmodel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LaplaceSmoothingModel type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L427-L432 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L427-L432 type LaplaceSmoothingModel struct { // Alpha A constant that is added to all counts to balance weights. Alpha Float64 `json:"alpha"` @@ -58,7 +59,7 @@ func (s *LaplaceSmoothingModel) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Alpha", err) } f := Float64(value) s.Alpha = f diff --git a/typedapi/types/latest.go b/typedapi/types/latest.go index 76b164712b..1a9417d85b 100644 --- a/typedapi/types/latest.go +++ b/typedapi/types/latest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Latest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L47-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L47-L52 type Latest struct { // Sort Specifies the date field that is used to identify the latest documents. Sort string `json:"sort"` @@ -54,12 +55,12 @@ func (s *Latest) UnmarshalJSON(data []byte) error { case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "unique_key": if err := dec.Decode(&s.UniqueKey); err != nil { - return err + return fmt.Errorf("%s | %w", "UniqueKey", err) } } diff --git a/typedapi/types/latlongeolocation.go b/typedapi/types/latlongeolocation.go index 0a5abe5a5e..123a666e2b 100644 --- a/typedapi/types/latlongeolocation.go +++ b/typedapi/types/latlongeolocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LatLonGeoLocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L120-L129 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L120-L129 type LatLonGeoLocation struct { // Lat Latitude Lat Float64 `json:"lat"` @@ -60,7 +61,7 @@ func (s *LatLonGeoLocation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lat", err) } f := Float64(value) s.Lat = f @@ -76,7 +77,7 @@ func (s *LatLonGeoLocation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lon", err) } f := Float64(value) s.Lon = f diff --git a/typedapi/types/lengthtokenfilter.go b/typedapi/types/lengthtokenfilter.go index 89a15a1264..e78e1ad633 100644 --- a/typedapi/types/lengthtokenfilter.go +++ b/typedapi/types/lengthtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LengthTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L243-L247 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L243-L247 type LengthTokenFilter struct { Max *int `json:"max,omitempty"` Min *int `json:"min,omitempty"` @@ -61,7 +62,7 @@ func (s *LengthTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } s.Max = &value case float64: @@ -77,7 +78,7 @@ func (s *LengthTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } s.Min = &value case float64: @@ -87,12 +88,12 @@ func (s *LengthTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/lettertokenizer.go b/typedapi/types/lettertokenizer.go index 55b0fecc1c..7f074c0ecd 100644 --- a/typedapi/types/lettertokenizer.go +++ b/typedapi/types/lettertokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // LetterTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L67-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L67-L69 type LetterTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *LetterTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/license.go b/typedapi/types/license.go index d2ba66f017..e68a6d257e 100644 --- a/typedapi/types/license.go +++ b/typedapi/types/license.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // License type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/_types/License.ts#L42-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/_types/License.ts#L42-L53 type License struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` IssueDateInMillis int64 `json:"issue_date_in_millis"` @@ -63,18 +64,18 @@ func (s *License) UnmarshalJSON(data []byte) error { case "expiry_date_in_millis": if err := dec.Decode(&s.ExpiryDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpiryDateInMillis", err) } case "issue_date_in_millis": if err := dec.Decode(&s.IssueDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "IssueDateInMillis", err) } case "issued_to": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IssuedTo", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *License) UnmarshalJSON(data []byte) error { case "issuer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Issuer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,7 +98,7 @@ func (s *License) UnmarshalJSON(data []byte) error { case "max_nodes": if err := dec.Decode(&s.MaxNodes); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNodes", err) } case "max_resource_units": @@ -107,7 +108,7 @@ func (s *License) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxResourceUnits", err) } s.MaxResourceUnits = &value case float64: @@ -118,7 +119,7 @@ func (s *License) UnmarshalJSON(data []byte) error { case "signature": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Signature", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,18 +130,18 @@ func (s *License) UnmarshalJSON(data []byte) error { case "start_date_in_millis": if err := 
dec.Decode(&s.StartDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartDateInMillis", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "uid": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Uid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/licenseinformation.go b/typedapi/types/licenseinformation.go index 2293a7b931..b4be670d29 100644 --- a/typedapi/types/licenseinformation.go +++ b/typedapi/types/licenseinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // LicenseInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/license/get/types.ts#L25-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/license/get/types.ts#L25-L38 type LicenseInformation struct { ExpiryDate DateTime `json:"expiry_date,omitempty"` ExpiryDateInMillis *int64 `json:"expiry_date_in_millis,omitempty"` @@ -66,28 +67,28 @@ func (s *LicenseInformation) UnmarshalJSON(data []byte) error { case "expiry_date": if err := dec.Decode(&s.ExpiryDate); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpiryDate", err) } case "expiry_date_in_millis": if err := dec.Decode(&s.ExpiryDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpiryDateInMillis", err) } case "issue_date": if err := dec.Decode(&s.IssueDate); err != nil { - return err + return fmt.Errorf("%s | %w", "IssueDate", err) } case "issue_date_in_millis": if err := dec.Decode(&s.IssueDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "IssueDateInMillis", err) } case "issued_to": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IssuedTo", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,7 +100,7 @@ func (s *LicenseInformation) UnmarshalJSON(data []byte) error { case "issuer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Issuer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -110,32 +111,32 @@ func (s *LicenseInformation) UnmarshalJSON(data []byte) error { case "max_nodes": if err := dec.Decode(&s.MaxNodes); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNodes", err) } case "max_resource_units": if err := dec.Decode(&s.MaxResourceUnits); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxResourceUnits", err) } case "start_date_in_millis": if err := dec.Decode(&s.StartDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartDateInMillis", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "uid": if err := dec.Decode(&s.Uid); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Uid", err) } } diff --git a/typedapi/types/lifecycle.go b/typedapi/types/lifecycle.go index bbcc00dc68..8c098b2531 100644 --- a/typedapi/types/lifecycle.go +++ b/typedapi/types/lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Lifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/get_lifecycle/types.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/get_lifecycle/types.ts#L24-L28 type Lifecycle struct { ModifiedDate DateTime `json:"modified_date"` Policy IlmPolicy `json:"policy"` @@ -53,17 +54,17 @@ func (s *Lifecycle) UnmarshalJSON(data []byte) error { case "modified_date": if err := dec.Decode(&s.ModifiedDate); err != nil { - return err + return fmt.Errorf("%s | %w", "ModifiedDate", err) } case "policy": if err := dec.Decode(&s.Policy); err != nil { - return err + return fmt.Errorf("%s | %w", "Policy", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/lifecycleexplain.go b/typedapi/types/lifecycleexplain.go index 83622adc0c..f528306db8 100644 --- a/typedapi/types/lifecycleexplain.go +++ b/typedapi/types/lifecycleexplain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // LifecycleExplainManaged // LifecycleExplainUnmanaged // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/explain_lifecycle/types.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/explain_lifecycle/types.ts#L59-L62 type LifecycleExplain interface{} diff --git a/typedapi/types/lifecycleexplainmanaged.go b/typedapi/types/lifecycleexplainmanaged.go index 48dfbd2f5c..7bda5d7477 100644 --- a/typedapi/types/lifecycleexplainmanaged.go +++ b/typedapi/types/lifecycleexplainmanaged.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LifecycleExplainManaged type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/explain_lifecycle/types.ts#L26-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/explain_lifecycle/types.ts#L26-L52 type LifecycleExplainManaged struct { Action *string `json:"action,omitempty"` ActionTime DateTime `json:"action_time,omitempty"` @@ -74,27 +75,27 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { case "action": if err := dec.Decode(&s.Action); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } case "action_time": if err := dec.Decode(&s.ActionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ActionTime", err) } case "action_time_millis": if err := dec.Decode(&s.ActionTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ActionTimeMillis", err) } case "age": if err := dec.Decode(&s.Age); err != nil { - return err + return fmt.Errorf("%s | %w", "Age", err) } case "failed_step": if err := dec.Decode(&s.FailedStep); err != nil { - return err + return fmt.Errorf("%s | %w", "FailedStep", err) } case "failed_step_retry_count": @@ -105,7 +106,7 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedStepRetryCount", err) } s.FailedStepRetryCount = &value case float64: @@ -115,17 +116,17 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "index_creation_date": if err := dec.Decode(&s.IndexCreationDate); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCreationDate", err) } case "index_creation_date_millis": if err := dec.Decode(&s.IndexCreationDateMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCreationDateMillis", err) } case "is_auto_retryable_error": @@ -135,7 +136,7 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsAutoRetryableError", err) } s.IsAutoRetryableError = &value case bool: @@ -144,47 +145,47 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { case "lifecycle_date": if err := dec.Decode(&s.LifecycleDate); err != nil { - return err + return fmt.Errorf("%s | %w", "LifecycleDate", err) } case "lifecycle_date_millis": if err := dec.Decode(&s.LifecycleDateMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "LifecycleDateMillis", err) } case "managed": if err := dec.Decode(&s.Managed); err != nil { - return err + return fmt.Errorf("%s | %w", "Managed", err) } case "phase": if err := dec.Decode(&s.Phase); err != nil { - return err + return fmt.Errorf("%s | %w", "Phase", err) } case "phase_execution": if err := dec.Decode(&s.PhaseExecution); err != nil { - return err + return fmt.Errorf("%s | %w", "PhaseExecution", err) } case "phase_time": if err := dec.Decode(&s.PhaseTime); err != nil { - return err + return fmt.Errorf("%s | %w", "PhaseTime", err) } case "phase_time_millis": if err := dec.Decode(&s.PhaseTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "PhaseTimeMillis", err) } case "policy": if err := dec.Decode(&s.Policy); err != nil { - return err + return fmt.Errorf("%s | %w", "Policy", err) } case "step": if 
err := dec.Decode(&s.Step); err != nil { - return err + return fmt.Errorf("%s | %w", "Step", err) } case "step_info": @@ -192,22 +193,22 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { s.StepInfo = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.StepInfo); err != nil { - return err + return fmt.Errorf("%s | %w", "StepInfo", err) } case "step_time": if err := dec.Decode(&s.StepTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StepTime", err) } case "step_time_millis": if err := dec.Decode(&s.StepTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StepTimeMillis", err) } case "time_since_index_creation": if err := dec.Decode(&s.TimeSinceIndexCreation); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSinceIndexCreation", err) } } diff --git a/typedapi/types/lifecycleexplainphaseexecution.go b/typedapi/types/lifecycleexplainphaseexecution.go index 7e5f8fca44..186a70861d 100644 --- a/typedapi/types/lifecycleexplainphaseexecution.go +++ b/typedapi/types/lifecycleexplainphaseexecution.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // LifecycleExplainPhaseExecution type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/explain_lifecycle/types.ts#L64-L68 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/explain_lifecycle/types.ts#L64-L68 type LifecycleExplainPhaseExecution struct { ModifiedDateInMillis int64 `json:"modified_date_in_millis"` Policy string `json:"policy"` @@ -53,17 +54,17 @@ func (s *LifecycleExplainPhaseExecution) UnmarshalJSON(data []byte) error { case "modified_date_in_millis": if err := dec.Decode(&s.ModifiedDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ModifiedDateInMillis", err) } case "policy": if err := dec.Decode(&s.Policy); err != nil { - return err + return fmt.Errorf("%s | %w", "Policy", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/lifecycleexplainunmanaged.go b/typedapi/types/lifecycleexplainunmanaged.go index b57f6de12b..28ad662d7c 100644 --- a/typedapi/types/lifecycleexplainunmanaged.go +++ b/typedapi/types/lifecycleexplainunmanaged.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // LifecycleExplainUnmanaged type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/explain_lifecycle/types.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/explain_lifecycle/types.ts#L54-L57 type LifecycleExplainUnmanaged struct { Index string `json:"index"` Managed bool `json:"managed,omitempty"` @@ -52,12 +53,12 @@ func (s *LifecycleExplainUnmanaged) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "managed": if err := dec.Decode(&s.Managed); err != nil { - return err + return fmt.Errorf("%s | %w", "Managed", err) } } diff --git a/typedapi/types/like.go b/typedapi/types/like.go index c61761931e..6702a0bfda 100644 --- a/typedapi/types/like.go +++ b/typedapi/types/like.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // LikeDocument // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L186-L191 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L186-L191 type Like interface{} diff --git a/typedapi/types/likedocument.go b/typedapi/types/likedocument.go index 7fafd4f4c9..c433d414ee 100644 --- a/typedapi/types/likedocument.go +++ b/typedapi/types/likedocument.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" @@ -31,7 +32,7 @@ import ( // LikeDocument type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L165-L184 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L165-L184 type LikeDocument struct { // Doc A document not present in the index. 
Doc json.RawMessage `json:"doc,omitempty"` @@ -63,22 +64,22 @@ func (s *LikeDocument) UnmarshalJSON(data []byte) error { case "doc": if err := dec.Decode(&s.Doc); err != nil { - return err + return fmt.Errorf("%s | %w", "Doc", err) } case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "per_field_analyzer": @@ -86,22 +87,22 @@ func (s *LikeDocument) UnmarshalJSON(data []byte) error { s.PerFieldAnalyzer = make(map[string]string, 0) } if err := dec.Decode(&s.PerFieldAnalyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "PerFieldAnalyzer", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/limits.go b/typedapi/types/limits.go index 262c5a34b3..df16994297 100644 --- a/typedapi/types/limits.go +++ b/typedapi/types/limits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Limits type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/info/types.ts#L34-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/info/types.ts#L34-L38 type Limits struct { EffectiveMaxModelMemoryLimit string `json:"effective_max_model_memory_limit"` MaxModelMemoryLimit *string `json:"max_model_memory_limit,omitempty"` @@ -55,7 +56,7 @@ func (s *Limits) UnmarshalJSON(data []byte) error { case "effective_max_model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "EffectiveMaxModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *Limits) UnmarshalJSON(data []byte) error { case "max_model_memory_limit": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxModelMemoryLimit", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *Limits) UnmarshalJSON(data []byte) error { case "total_ml_memory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalMlMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/limittokencounttokenfilter.go b/typedapi/types/limittokencounttokenfilter.go index d74e902a3f..935e6bb5c6 100644 --- a/typedapi/types/limittokencounttokenfilter.go +++ b/typedapi/types/limittokencounttokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LimitTokenCountTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L249-L253 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L249-L253 type LimitTokenCountTokenFilter struct { ConsumeAllTokens *bool `json:"consume_all_tokens,omitempty"` MaxTokenCount Stringifiedinteger `json:"max_token_count,omitempty"` @@ -60,7 +61,7 @@ func (s *LimitTokenCountTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ConsumeAllTokens", err) } s.ConsumeAllTokens = &value case bool: @@ -69,17 +70,17 @@ func (s *LimitTokenCountTokenFilter) UnmarshalJSON(data []byte) error { case "max_token_count": if err := dec.Decode(&s.MaxTokenCount); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenCount", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/linearinterpolationsmoothingmodel.go b/typedapi/types/linearinterpolationsmoothingmodel.go index 22892c354d..4fc68fa9f2 100644 --- a/typedapi/types/linearinterpolationsmoothingmodel.go +++ b/typedapi/types/linearinterpolationsmoothingmodel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LinearInterpolationSmoothingModel type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L434-L438 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L434-L438 type LinearInterpolationSmoothingModel struct { BigramLambda Float64 `json:"bigram_lambda"` TrigramLambda Float64 `json:"trigram_lambda"` @@ -59,7 +60,7 @@ func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BigramLambda", err) } f := Float64(value) s.BigramLambda = f @@ -75,7 +76,7 @@ func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrigramLambda", err) } f := Float64(value) s.TrigramLambda = f @@ -91,7 +92,7 @@ func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnigramLambda", err) } f := Float64(value) s.UnigramLambda = f diff --git a/typedapi/types/linearmovingaverageaggregation.go b/typedapi/types/linearmovingaverageaggregation.go index 007046eb97..59fe407bb3 100644 --- a/typedapi/types/linearmovingaverageaggregation.go +++ b/typedapi/types/linearmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // LinearMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L242-L245 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L242-L245 type LinearMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -68,13 +69,13 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,12 +86,12 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "minimize": @@ -100,7 +101,7 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Minimize", err) } s.Minimize = &value case bool: @@ -109,13 +110,13 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "model": if err := dec.Decode(&s.Model); err != nil { - return err + return fmt.Errorf("%s | %w", "Model", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,7 +133,7 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Predict", err) } s.Predict = &value case float64: @@ -142,7 +143,7 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "window": @@ -153,7 +154,7 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/loggingaction.go b/typedapi/types/loggingaction.go index a4c7fa0417..8fef227ee6 100644 --- a/typedapi/types/loggingaction.go +++ b/typedapi/types/loggingaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LoggingAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L281-L285 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L281-L285 type LoggingAction struct { Category *string `json:"category,omitempty"` Level *string `json:"level,omitempty"` @@ -55,7 +56,7 @@ func (s *LoggingAction) UnmarshalJSON(data []byte) error { case "category": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Category", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *LoggingAction) UnmarshalJSON(data []byte) error { case "level": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Level", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *LoggingAction) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/loggingresult.go b/typedapi/types/loggingresult.go index 4ba18240b2..79586ae740 100644 --- a/typedapi/types/loggingresult.go +++ b/typedapi/types/loggingresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LoggingResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L287-L289 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L287-L289 type LoggingResult struct { LoggedText string `json:"logged_text"` } @@ -53,7 +54,7 @@ func (s *LoggingResult) UnmarshalJSON(data []byte) error { case "logged_text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LoggedText", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/logstashpipeline.go b/typedapi/types/logstashpipeline.go index 4429523615..74c7480506 100644 --- a/typedapi/types/logstashpipeline.go +++ b/typedapi/types/logstashpipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LogstashPipeline type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/logstash/_types/Pipeline.ts#L60-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/logstash/_types/Pipeline.ts#L60-L92 type LogstashPipeline struct { // Description Description of the pipeline. 
// This description is not used by Elasticsearch or Logstash. @@ -69,7 +70,7 @@ func (s *LogstashPipeline) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,13 +81,13 @@ func (s *LogstashPipeline) UnmarshalJSON(data []byte) error { case "last_modified": if err := dec.Decode(&s.LastModified); err != nil { - return err + return fmt.Errorf("%s | %w", "LastModified", err) } case "pipeline": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,18 +98,18 @@ func (s *LogstashPipeline) UnmarshalJSON(data []byte) error { case "pipeline_metadata": if err := dec.Decode(&s.PipelineMetadata); err != nil { - return err + return fmt.Errorf("%s | %w", "PipelineMetadata", err) } case "pipeline_settings": if err := dec.Decode(&s.PipelineSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "PipelineSettings", err) } case "username": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/longnumberproperty.go b/typedapi/types/longnumberproperty.go index be3c3bbb0a..b53f01e9cc 100644 --- a/typedapi/types/longnumberproperty.go +++ b/typedapi/types/longnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // LongNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L154-L157 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L154-L157 type LongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,7 +499,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -508,7 +509,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } s.NullValue = &value case float64: @@ -518,7 +519,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -831,7 +832,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) 
} keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -840,7 +841,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -849,7 +850,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -857,7 +858,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -867,7 +868,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -883,7 +884,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -897,7 +898,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -906,12 +907,12 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/longrangeproperty.go b/typedapi/types/longrangeproperty.go index cbbe03ea12..96d9bc7c20 100644 --- a/typedapi/types/longrangeproperty.go +++ b/typedapi/types/longrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // LongRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/range.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/range.ts#L50-L52 type LongRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -72,7 +73,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -101,13 +102,13 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -118,7 +119,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -127,7 +128,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -445,7 +446,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -460,7 +461,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -472,7 +473,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -785,7 +786,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -801,7 +802,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -810,7 +811,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/longraretermsaggregate.go b/typedapi/types/longraretermsaggregate.go index eddd3c4fe9..632e21bdb1 100644 --- a/typedapi/types/longraretermsaggregate.go +++ b/typedapi/types/longraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // LongRareTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L431-L436 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L431-L436 type LongRareTermsAggregate struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *LongRareTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]LongRareTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []LongRareTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/longraretermsbucket.go b/typedapi/types/longraretermsbucket.go index a1c9f75ac3..33fa9ed398 100644 --- a/typedapi/types/longraretermsbucket.go +++ b/typedapi/types/longraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // LongRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L438-L441 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L438-L441 type LongRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -62,7 +62,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -77,7 +77,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } s.Key = value case float64: @@ -88,7 +88,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -111,490 +111,490 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := 
NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := 
make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -604,7 +604,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/longtermsaggregate.go b/typedapi/types/longtermsaggregate.go index 2c35b0d3fa..daaffd8c43 100644 --- a/typedapi/types/longtermsaggregate.go +++ b/typedapi/types/longtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LongTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L399-L404 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L399-L404 type LongTermsAggregate struct { Buckets BucketsLongTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]LongTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []LongTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/longtermsbucket.go b/typedapi/types/longtermsbucket.go index d828aae7cc..8cbb5330a1 100644 --- a/typedapi/types/longtermsbucket.go +++ b/typedapi/types/longtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // LongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L406-L409 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L406-L409 type LongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -63,7 +63,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -78,7 +78,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountError", err) } s.DocCountError = &value case float64: @@ -93,7 +93,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } s.Key = value case float64: @@ -104,7 +104,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,490 +127,490 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { 
- return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -620,7 +620,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/lowercasenormalizer.go b/typedapi/types/lowercasenormalizer.go index a8f4623270..ca00e3fde1 100644 --- a/typedapi/types/lowercasenormalizer.go +++ b/typedapi/types/lowercasenormalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // LowercaseNormalizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/normalizers.ts#L26-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/normalizers.ts#L26-L28 type LowercaseNormalizer struct { Type string `json:"type,omitempty"` } diff --git a/typedapi/types/lowercaseprocessor.go b/typedapi/types/lowercaseprocessor.go index 988b82124f..547f841d58 100644 --- a/typedapi/types/lowercaseprocessor.go +++ b/typedapi/types/lowercaseprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LowercaseProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L910-L926 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L910-L926 type LowercaseProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -72,7 +73,7 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,13 +84,13 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -119,7 +120,7 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -128,13 +129,13 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/lowercasetokenfilter.go b/typedapi/types/lowercasetokenfilter.go index 4d4518c371..efa6fec43b 100644 --- a/typedapi/types/lowercasetokenfilter.go +++ b/typedapi/types/lowercasetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // LowercaseTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L255-L258 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L255-L258 type LowercaseTokenFilter struct { Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` @@ -55,7 +56,7 @@ func (s *LowercaseTokenFilter) UnmarshalJSON(data []byte) error { case "language": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Language", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *LowercaseTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/lowercasetokenizer.go b/typedapi/types/lowercasetokenizer.go index 730999990a..e0747c3827 100644 --- a/typedapi/types/lowercasetokenizer.go +++ b/typedapi/types/lowercasetokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // LowercaseTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L71-L73 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L71-L73 type LowercaseTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *LowercaseTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/machinelearning.go b/typedapi/types/machinelearning.go index d1be83072f..54943c5868 100644 --- a/typedapi/types/machinelearning.go +++ b/typedapi/types/machinelearning.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MachineLearning type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L372-L379 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L372-L379 type MachineLearning struct { Available bool `json:"available"` DataFrameAnalyticsJobs MlDataFrameAnalyticsJobs `json:"data_frame_analytics_jobs"` @@ -65,7 +66,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -74,7 +75,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { case "data_frame_analytics_jobs": if err := dec.Decode(&s.DataFrameAnalyticsJobs); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameAnalyticsJobs", err) } case "datafeeds": @@ -82,7 +83,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { s.Datafeeds = make(map[string]XpackDatafeed, 0) } if err := dec.Decode(&s.Datafeeds); err != nil { - return err + return fmt.Errorf("%s | %w", "Datafeeds", err) } case "enabled": @@ -92,7 +93,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -101,7 +102,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { case "inference": if err := dec.Decode(&s.Inference); err != nil { - return err + return fmt.Errorf("%s | %w", "Inference", err) } case "jobs": @@ -109,7 +110,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { s.Jobs = make(map[string]JobUsage, 0) } if err := dec.Decode(&s.Jobs); err != nil { - return err + return fmt.Errorf("%s | %w", "Jobs", err) } case "node_count": @@ -120,7 +121,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodeCount", err) } s.NodeCount = value case float64: diff --git a/typedapi/types/manageuserprivileges.go b/typedapi/types/manageuserprivileges.go index e49e449af6..749aac0c9c 100644 --- a/typedapi/types/manageuserprivileges.go +++ b/typedapi/types/manageuserprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ManageUserPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L197-L199 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L197-L199 type ManageUserPrivileges struct { Applications []string `json:"applications"` } diff --git a/typedapi/types/mapboxvectortiles.go b/typedapi/types/mapboxvectortiles.go index 0ed7485d8b..7db6d95d95 100644 --- a/typedapi/types/mapboxvectortiles.go +++ b/typedapi/types/mapboxvectortiles.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MapboxVectorTiles type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Binary.ts#L21-L21 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Binary.ts#L21-L21 type MapboxVectorTiles []byte diff --git a/typedapi/types/mappingcharfilter.go b/typedapi/types/mappingcharfilter.go index 3959fe850e..af351415c3 100644 --- a/typedapi/types/mappingcharfilter.go +++ b/typedapi/types/mappingcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/char_filters.ts#L47-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/char_filters.ts#L47-L51 type MappingCharFilter struct { Mappings []string `json:"mappings,omitempty"` MappingsPath *string `json:"mappings_path,omitempty"` @@ -55,13 +56,13 @@ func (s *MappingCharFilter) UnmarshalJSON(data []byte) error { case "mappings": if err := dec.Decode(&s.Mappings); err != nil { - return err + return fmt.Errorf("%s | %w", "Mappings", err) } case "mappings_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MappingsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,12 +73,12 @@ func (s *MappingCharFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/mappinglimitsettings.go b/typedapi/types/mappinglimitsettings.go index 6bcdb08863..46b1b7e7bf 100644 --- a/typedapi/types/mappinglimitsettings.go +++ b/typedapi/types/mappinglimitsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L405-L418 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L409-L422 type MappingLimitSettings struct { Coerce *bool `json:"coerce,omitempty"` Depth *MappingLimitSettingsDepth `json:"depth,omitempty"` @@ -64,7 +65,7 @@ func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -73,17 +74,17 @@ func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { case "depth": if err := dec.Decode(&s.Depth); err != nil { - return err + return fmt.Errorf("%s | %w", "Depth", err) } case "dimension_fields": if err := dec.Decode(&s.DimensionFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DimensionFields", err) } case "field_name_length": if err := dec.Decode(&s.FieldNameLength); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldNameLength", err) } case "ignore_malformed": @@ -93,7 +94,7 @@ func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -102,17 +103,17 @@ func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { case "nested_fields": if err := dec.Decode(&s.NestedFields); err != nil { - return err + return fmt.Errorf("%s | %w", "NestedFields", err) } case "nested_objects": if err := dec.Decode(&s.NestedObjects); err != nil { - return err + return fmt.Errorf("%s | %w", "NestedObjects", err) } case "total_fields": if err := dec.Decode(&s.TotalFields); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalFields", err) } } diff --git a/typedapi/types/mappinglimitsettingsdepth.go b/typedapi/types/mappinglimitsettingsdepth.go index fe7b90d42e..813e731861 100644 --- a/typedapi/types/mappinglimitsettingsdepth.go +++ b/typedapi/types/mappinglimitsettingsdepth.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettingsDepth type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L430-L437 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L434-L441 type MappingLimitSettingsDepth struct { // Limit The maximum depth for a field, which is measured as the number of inner // objects. 
For instance, if all fields are defined @@ -62,7 +63,7 @@ func (s *MappingLimitSettingsDepth) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/mappinglimitsettingsdimensionfields.go b/typedapi/types/mappinglimitsettingsdimensionfields.go index eab6cea857..1a4e39ee36 100644 --- a/typedapi/types/mappinglimitsettingsdimensionfields.go +++ b/typedapi/types/mappinglimitsettingsdimensionfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettingsDimensionFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L467-L473 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L471-L477 type MappingLimitSettingsDimensionFields struct { // Limit [preview] This functionality is in technical preview and may be changed or // removed in a future release. @@ -62,7 +63,7 @@ func (s *MappingLimitSettingsDimensionFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/mappinglimitsettingsfieldnamelength.go b/typedapi/types/mappinglimitsettingsfieldnamelength.go index eab12c7176..71b1f36f45 100644 --- a/typedapi/types/mappinglimitsettingsfieldnamelength.go +++ b/typedapi/types/mappinglimitsettingsfieldnamelength.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettingsFieldNameLength type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L458-L465 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L462-L469 type MappingLimitSettingsFieldNameLength struct { // Limit Setting for the maximum length of a field name. 
This setting isn’t really // something that addresses mappings explosion but @@ -63,7 +64,7 @@ func (s *MappingLimitSettingsFieldNameLength) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/mappinglimitsettingsnestedfields.go b/typedapi/types/mappinglimitsettingsnestedfields.go index 6f83da0e48..1383a3794c 100644 --- a/typedapi/types/mappinglimitsettingsnestedfields.go +++ b/typedapi/types/mappinglimitsettingsnestedfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettingsNestedFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L439-L447 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L443-L451 type MappingLimitSettingsNestedFields struct { // Limit The maximum number of distinct nested mappings in an index. The nested type // should only be used in special cases, when @@ -63,7 +64,7 @@ func (s *MappingLimitSettingsNestedFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/mappinglimitsettingsnestedobjects.go b/typedapi/types/mappinglimitsettingsnestedobjects.go index 7f02e6099d..c59c540d33 100644 --- a/typedapi/types/mappinglimitsettingsnestedobjects.go +++ b/typedapi/types/mappinglimitsettingsnestedobjects.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettingsNestedObjects type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L449-L456 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L453-L460 type MappingLimitSettingsNestedObjects struct { // Limit The maximum number of nested JSON objects that a single document can contain // across all nested types. 
This limit helps @@ -62,7 +63,7 @@ func (s *MappingLimitSettingsNestedObjects) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/mappinglimitsettingstotalfields.go b/typedapi/types/mappinglimitsettingstotalfields.go index 662447d539..36aaf02b31 100644 --- a/typedapi/types/mappinglimitsettingstotalfields.go +++ b/typedapi/types/mappinglimitsettingstotalfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingLimitSettingsTotalFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L420-L428 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L424-L432 type MappingLimitSettingsTotalFields struct { // Limit The maximum number of fields in an index. Field and object mappings, as well // as field aliases count towards this limit. @@ -64,7 +65,7 @@ func (s *MappingLimitSettingsTotalFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: diff --git a/typedapi/types/mappingstats.go b/typedapi/types/mappingstats.go index 24ed04d857..a00bf94a37 100644 --- a/typedapi/types/mappingstats.go +++ b/typedapi/types/mappingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MappingStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L186-L190 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L186-L190 type MappingStats struct { TotalCount int64 `json:"total_count"` TotalEstimatedOverhead ByteSize `json:"total_estimated_overhead,omitempty"` @@ -59,7 +60,7 @@ func (s *MappingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCount", err) } s.TotalCount = value case float64: @@ -69,7 +70,7 @@ func (s *MappingStats) UnmarshalJSON(data []byte) error { case "total_estimated_overhead": if err := dec.Decode(&s.TotalEstimatedOverhead); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalEstimatedOverhead", err) } case "total_estimated_overhead_in_bytes": @@ -79,7 +80,7 @@ func (s *MappingStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalEstimatedOverheadInBytes", err) } s.TotalEstimatedOverheadInBytes = value case float64: diff --git a/typedapi/types/masterisstableindicator.go b/typedapi/types/masterisstableindicator.go index 4303447e69..57f4d31fbb 100644 --- a/typedapi/types/masterisstableindicator.go +++ b/typedapi/types/masterisstableindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MasterIsStableIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L79-L83 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L79-L83 type MasterIsStableIndicator struct { Details *MasterIsStableIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *MasterIsStableIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/masterisstableindicatorclusterformationnode.go b/typedapi/types/masterisstableindicatorclusterformationnode.go index cfee18dec5..c35cd63e1b 100644 --- a/typedapi/types/masterisstableindicatorclusterformationnode.go +++ b/typedapi/types/masterisstableindicatorclusterformationnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MasterIsStableIndicatorClusterFormationNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L98-L102 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L98-L102 type MasterIsStableIndicatorClusterFormationNode struct { ClusterFormationMessage string `json:"cluster_formation_message"` Name *string `json:"name,omitempty"` @@ -55,7 +56,7 @@ func (s *MasterIsStableIndicatorClusterFormationNode) UnmarshalJSON(data []byte) case "cluster_formation_message": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClusterFormationMessage", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *MasterIsStableIndicatorClusterFormationNode) UnmarshalJSON(data []byte) case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *MasterIsStableIndicatorClusterFormationNode) UnmarshalJSON(data []byte) case "node_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/masterisstableindicatordetails.go b/typedapi/types/masterisstableindicatordetails.go index cdee5de55d..af54843195 100644 --- a/typedapi/types/masterisstableindicatordetails.go +++ b/typedapi/types/masterisstableindicatordetails.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MasterIsStableIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L84-L89 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L84-L89 type MasterIsStableIndicatorDetails struct { ClusterFormation []MasterIsStableIndicatorClusterFormationNode `json:"cluster_formation,omitempty"` CurrentMaster IndicatorNode `json:"current_master"` diff --git a/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go b/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go index e1934ec663..36433b80d9 100644 --- a/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go +++ b/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MasterIsStableIndicatorExceptionFetchingHistory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L94-L97 type MasterIsStableIndicatorExceptionFetchingHistory struct { Message string `json:"message"` StackTrace string `json:"stack_trace"` @@ -54,7 +55,7 @@ func (s *MasterIsStableIndicatorExceptionFetchingHistory) UnmarshalJSON(data []b case "message": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *MasterIsStableIndicatorExceptionFetchingHistory) UnmarshalJSON(data []b case "stack_trace": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StackTrace", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/masterrecord.go b/typedapi/types/masterrecord.go index 5d71a3743c..b977ee473b 100644 --- a/typedapi/types/masterrecord.go +++ b/typedapi/types/masterrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MasterRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/master/types.ts#L20-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/master/types.ts#L20-L39 type MasterRecord struct { // Host host name Host *string `json:"host,omitempty"` @@ -60,7 +61,7 @@ func (s *MasterRecord) UnmarshalJSON(data []byte) error { case "host", "h": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *MasterRecord) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *MasterRecord) UnmarshalJSON(data []byte) error { case "ip": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +97,7 @@ func (s *MasterRecord) UnmarshalJSON(data []byte) error { case "node", "n": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/matchallquery.go b/typedapi/types/matchallquery.go index 3cac09ade5..e704d3c3d3 100644 --- a/typedapi/types/matchallquery.go +++ b/typedapi/types/matchallquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatchAllQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/MatchAllQuery.ts#L22-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/MatchAllQuery.ts#L22-L22 type MatchAllQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -63,7 +64,7 @@ func (s *MatchAllQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -75,7 +76,7 @@ func (s *MatchAllQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/matchboolprefixquery.go b/typedapi/types/matchboolprefixquery.go index be4cec1b49..1ac13fc12d 100644 --- a/typedapi/types/matchboolprefixquery.go +++ b/typedapi/types/matchboolprefixquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MatchBoolPrefixQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L349-L403 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L349-L403 type MatchBoolPrefixQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. 
Analyzer *string `json:"analyzer,omitempty"` @@ -102,7 +103,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -129,12 +130,12 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "fuzzy_rewrite": if err := dec.Decode(&s.FuzzyRewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyRewrite", err) } case "fuzzy_transpositions": @@ -144,7 +145,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyTranspositions", err) } s.FuzzyTranspositions = &value case bool: @@ -159,7 +160,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxExpansions", err) } s.MaxExpansions = &value case float64: @@ -169,12 +170,12 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "operator": if err := dec.Decode(&s.Operator); err != nil { - return err + return fmt.Errorf("%s | %w", "Operator", err) } case "prefix_length": @@ -185,7 +186,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -196,7 +197,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -208,7 +209,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/matchedfield.go b/typedapi/types/matchedfield.go index 2b08c41d4c..5f06a72851 100644 --- a/typedapi/types/matchedfield.go +++ b/typedapi/types/matchedfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatchedField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/test_grok_pattern/types.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/test_grok_pattern/types.ts#L23-L27 type MatchedField struct { Length int `json:"length"` Match string `json:"match"` @@ -60,7 +61,7 @@ func (s *MatchedField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = value case float64: @@ -71,7 +72,7 @@ func (s *MatchedField) UnmarshalJSON(data []byte) error { case "match": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *MatchedField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } s.Offset = value case float64: diff --git a/typedapi/types/matchedtext.go b/typedapi/types/matchedtext.go index 9ac0980fa8..c7f8c50028 100644 --- a/typedapi/types/matchedtext.go +++ b/typedapi/types/matchedtext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatchedText type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/test_grok_pattern/types.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/test_grok_pattern/types.ts#L29-L32 type MatchedText struct { Fields map[string][]MatchedField `json:"fields,omitempty"` Matched bool `json:"matched"` @@ -56,7 +57,7 @@ func (s *MatchedText) UnmarshalJSON(data []byte) error { s.Fields = make(map[string][]MatchedField, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "matched": @@ -66,7 +67,7 @@ func (s *MatchedText) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Matched", err) } s.Matched = value case bool: diff --git a/typedapi/types/matchnonequery.go b/typedapi/types/matchnonequery.go index 4497305cd4..d0467ddd93 100644 --- a/typedapi/types/matchnonequery.go +++ b/typedapi/types/matchnonequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatchNoneQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/MatchNoneQuery.ts#L22-L22 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/MatchNoneQuery.ts#L22-L22 type MatchNoneQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -63,7 +64,7 @@ func (s *MatchNoneQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -75,7 +76,7 @@ func (s *MatchNoneQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/matchonlytextproperty.go b/typedapi/types/matchonlytextproperty.go index f1b7e710ba..fbee77fa73 100644 --- a/typedapi/types/matchonlytextproperty.go +++ b/typedapi/types/matchonlytextproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MatchOnlyTextProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L215-L240 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L216-L241 type MatchOnlyTextProperty struct { // CopyTo Allows you to copy the values of multiple fields into a group // field, which can then be queried as a single field. @@ -65,13 +66,13 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -387,12 +388,12 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/matchphraseprefixquery.go b/typedapi/types/matchphraseprefixquery.go index 8ed0ddd9fb..804a389116 100644 --- a/typedapi/types/matchphraseprefixquery.go +++ b/typedapi/types/matchphraseprefixquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MatchPhrasePrefixQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L428-L454 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L428-L454 type MatchPhrasePrefixQuery struct { // Analyzer Analyzer used to convert text in the query value into tokens. Analyzer *string `json:"analyzer,omitempty"` @@ -82,7 +83,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +99,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -115,7 +116,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxExpansions", err) } s.MaxExpansions = &value case float64: @@ -126,7 +127,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,7 +139,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -155,7 +156,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Slop", err) } s.Slop = &value case float64: @@ -165,7 +166,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case "zero_terms_query": if err := dec.Decode(&s.ZeroTermsQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "ZeroTermsQuery", err) } } diff --git a/typedapi/types/matchphrasequery.go b/typedapi/types/matchphrasequery.go index d488f5c9e9..c9fa4448e7 100644 --- a/typedapi/types/matchphrasequery.go +++ b/typedapi/types/matchphrasequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MatchPhraseQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L405-L426 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L405-L426 type MatchPhraseQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. Analyzer *string `json:"analyzer,omitempty"` @@ -79,7 +80,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -107,7 +108,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -119,7 +120,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -136,7 +137,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Slop", err) } s.Slop = &value case float64: @@ -146,7 +147,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case "zero_terms_query": if err := dec.Decode(&s.ZeroTermsQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "ZeroTermsQuery", err) } } diff --git a/typedapi/types/matchquery.go b/typedapi/types/matchquery.go index b613444a55..c9aa62df2f 100644 --- a/typedapi/types/matchquery.go +++ b/typedapi/types/matchquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // MatchQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L282-L347 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L282-L347 type MatchQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. 
Analyzer *string `json:"analyzer,omitempty"` @@ -100,7 +101,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,7 +117,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AutoGenerateSynonymsPhraseQuery", err) } s.AutoGenerateSynonymsPhraseQuery = &value case bool: @@ -130,7 +131,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -146,7 +147,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CutoffFrequency", err) } f := Float64(value) s.CutoffFrequency = &f @@ -157,12 +158,12 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "fuzzy_rewrite": if err := dec.Decode(&s.FuzzyRewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyRewrite", err) } case "fuzzy_transpositions": @@ -172,7 +173,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyTranspositions", err) } s.FuzzyTranspositions = &value case bool: @@ -186,7 +187,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } s.Lenient = &value case bool: @@ -201,7 +202,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxExpansions", err) } s.MaxExpansions = &value case float64: @@ -211,12 +212,12 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "operator": if err := dec.Decode(&s.Operator); err != nil { - return err + return fmt.Errorf("%s | %w", "Operator", err) } case "prefix_length": @@ -227,7 +228,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -238,7 +239,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -250,7 +251,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -261,7 +262,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "zero_terms_query": if err := dec.Decode(&s.ZeroTermsQuery); err != nil { - return err + return fmt.Errorf("%s | 
%w", "ZeroTermsQuery", err) } } diff --git a/typedapi/types/matrixaggregation.go b/typedapi/types/matrixaggregation.go index 7adc949a14..6a7cffad88 100644 --- a/typedapi/types/matrixaggregation.go +++ b/typedapi/types/matrixaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatrixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/matrix.ts#L26-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/matrix.ts#L26-L36 type MatrixAggregation struct { // Fields An array of fields for computing the statistics. Fields []string `json:"fields,omitempty"` @@ -62,19 +63,19 @@ func (s *MatrixAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "missing": @@ -82,13 +83,13 @@ func (s *MatrixAggregation) UnmarshalJSON(data []byte) error { s.Missing = make(map[string]Float64, 0) } if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/matrixstatsaggregate.go b/typedapi/types/matrixstatsaggregate.go index c7c1d040e1..b258299bd1 100644 --- a/typedapi/types/matrixstatsaggregate.go +++ b/typedapi/types/matrixstatsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatrixStatsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L757-L761 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L757-L761 type MatrixStatsAggregate struct { DocCount int64 `json:"doc_count"` Fields []MatrixStatsFields `json:"fields,omitempty"` @@ -59,7 +60,7 @@ func (s *MatrixStatsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -69,12 +70,12 @@ func (s *MatrixStatsAggregate) UnmarshalJSON(data []byte) error { case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/matrixstatsaggregation.go b/typedapi/types/matrixstatsaggregation.go index b5af3a51f1..bcd7bc9c46 100644 --- a/typedapi/types/matrixstatsaggregation.go +++ b/typedapi/types/matrixstatsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MatrixStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/matrix.ts#L38-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/matrix.ts#L38-L44 type MatrixStatsAggregation struct { // Fields An array of fields for computing the statistics. 
Fields []string `json:"fields,omitempty"` @@ -66,19 +67,19 @@ func (s *MatrixStatsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "missing": @@ -86,18 +87,18 @@ func (s *MatrixStatsAggregation) UnmarshalJSON(data []byte) error { s.Missing = make(map[string]Float64, 0) } if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/matrixstatsfields.go b/typedapi/types/matrixstatsfields.go index 84a5204b3f..98ccb92933 100644 --- a/typedapi/types/matrixstatsfields.go +++ b/typedapi/types/matrixstatsfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MatrixStatsFields type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L763-L772 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L763-L772 type MatrixStatsFields struct { Correlation map[string]Float64 `json:"correlation"` Count int64 `json:"count"` @@ -62,7 +63,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { s.Correlation = make(map[string]Float64, 0) } if err := dec.Decode(&s.Correlation); err != nil { - return err + return fmt.Errorf("%s | %w", "Correlation", err) } case "count": @@ -72,7 +73,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -85,7 +86,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { s.Covariance = make(map[string]Float64, 0) } if err := dec.Decode(&s.Covariance); err != nil { - return err + return fmt.Errorf("%s | %w", "Covariance", err) } case "kurtosis": @@ -95,7 +96,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Kurtosis", err) } f := Float64(value) s.Kurtosis = f @@ -111,7 +112,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Mean", err) } f := Float64(value) s.Mean = f @@ -122,7 +123,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "skewness": @@ -132,7 +133,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Skewness", err) } f := Float64(value) s.Skewness = f @@ -148,7 +149,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Variance", err) } f := Float64(value) s.Variance = f diff --git a/typedapi/types/maxaggregate.go b/typedapi/types/maxaggregate.go index f7b5badbfb..83bf1e94b6 100644 --- a/typedapi/types/maxaggregate.go +++ b/typedapi/types/maxaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MaxAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L200-L201 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L200-L201 type MaxAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. 
A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *MaxAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/maxaggregation.go b/typedapi/types/maxaggregation.go index ac5a18937d..878ccd0f38 100644 --- a/typedapi/types/maxaggregation.go +++ b/typedapi/types/maxaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MaxAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L162-L162 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L162-L162 type MaxAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *MaxAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *MaxAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *MaxAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *MaxAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *MaxAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/maxbucketaggregation.go b/typedapi/types/maxbucketaggregation.go index d37b062fd7..418022a94f 100644 --- a/typedapi/types/maxbucketaggregation.go +++ b/typedapi/types/maxbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MaxBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L224-L224 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L224-L224 type MaxBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/medianabsolutedeviationaggregate.go b/typedapi/types/medianabsolutedeviationaggregate.go index 9c72681561..0da927aeb1 100644 --- a/typedapi/types/medianabsolutedeviationaggregate.go +++ b/typedapi/types/medianabsolutedeviationaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MedianAbsoluteDeviationAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L194-L195 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L194-L195 type MedianAbsoluteDeviationAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. 
A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *MedianAbsoluteDeviationAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/medianabsolutedeviationaggregation.go b/typedapi/types/medianabsolutedeviationaggregation.go index 87468a4ab7..91f2133829 100644 --- a/typedapi/types/medianabsolutedeviationaggregation.go +++ b/typedapi/types/medianabsolutedeviationaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MedianAbsoluteDeviationAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L164-L170 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L164-L170 type MedianAbsoluteDeviationAggregation struct { // Compression Limits the maximum number of nodes used by the underlying TDigest algorithm // to `20 * compression`, enabling control of memory usage and approximation @@ -67,7 +68,7 @@ func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compression", err) } f := Float64(value) s.Compression = &f @@ -78,13 +79,13 @@ func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,13 +96,13 @@ func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -110,7 +111,7 @@ func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -119,7 +120,7 @@ func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -127,7 +128,7 @@ func (s *MedianAbsoluteDeviationAggregation) 
UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/memmlstats.go b/typedapi/types/memmlstats.go index d2fee7afa4..d4c0688173 100644 --- a/typedapi/types/memmlstats.go +++ b/typedapi/types/memmlstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MemMlStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_memory_stats/types.ts#L90-L111 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_memory_stats/types.ts#L90-L111 type MemMlStats struct { // AnomalyDetectors Amount of native memory set aside for anomaly detection jobs. AnomalyDetectors ByteSize `json:"anomaly_detectors,omitempty"` @@ -77,7 +78,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "anomaly_detectors": if err := dec.Decode(&s.AnomalyDetectors); err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyDetectors", err) } case "anomaly_detectors_in_bytes": @@ -88,7 +89,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnomalyDetectorsInBytes", err) } s.AnomalyDetectorsInBytes = value case float64: @@ -98,7 +99,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "data_frame_analytics": if err := dec.Decode(&s.DataFrameAnalytics); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameAnalytics", err) } case "data_frame_analytics_in_bytes": @@ -109,7 +110,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameAnalyticsInBytes", err) } s.DataFrameAnalyticsInBytes = value case float64: @@ -119,7 +120,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "max_in_bytes": @@ -130,7 +131,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxInBytes", err) } s.MaxInBytes = value case float64: @@ -140,7 +141,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "native_code_overhead": if err := dec.Decode(&s.NativeCodeOverhead); err != nil { - return err + return fmt.Errorf("%s | %w", "NativeCodeOverhead", err) } case "native_code_overhead_in_bytes": @@ -151,7 +152,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NativeCodeOverheadInBytes", err) } s.NativeCodeOverheadInBytes = value case float64: @@ -161,7 +162,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "native_inference": if err := dec.Decode(&s.NativeInference); err != nil { - return err + 
return fmt.Errorf("%s | %w", "NativeInference", err) } case "native_inference_in_bytes": @@ -172,7 +173,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NativeInferenceInBytes", err) } s.NativeInferenceInBytes = value case float64: diff --git a/typedapi/types/memory.go b/typedapi/types/memory.go index fca6996ee9..eb1a6f0077 100644 --- a/typedapi/types/memory.go +++ b/typedapi/types/memory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Memory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_memory_stats/types.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_memory_stats/types.ts#L25-L48 type Memory struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` @@ -66,37 +67,37 @@ func (s *Memory) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "ephemeral_id": if err := dec.Decode(&s.EphemeralId); err != nil { - return err + return fmt.Errorf("%s | %w", "EphemeralId", err) } case "jvm": if err := dec.Decode(&s.Jvm); err != nil { - return err + return fmt.Errorf("%s | %w", "Jvm", err) } case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/memorystats.go b/typedapi/types/memorystats.go index dd42e45905..658ad4e052 100644 --- a/typedapi/types/memorystats.go +++ b/typedapi/types/memorystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MemoryStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L596-L620 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L596-L620 type MemoryStats struct { // AdjustedTotalInBytes If the amount of physical memory has been overridden using the // `es`.`total_memory_bytes` system property then this reports the overridden @@ -73,7 +74,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AdjustedTotalInBytes", err) } s.AdjustedTotalInBytes = &value case float64: @@ -88,7 +89,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeInBytes", err) } s.FreeInBytes = &value case float64: @@ -99,7 +100,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case "resident": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Resident", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,7 +116,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ResidentInBytes", err) } s.ResidentInBytes = &value case float64: @@ -126,7 +127,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case "share": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Share", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -142,7 +143,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShareInBytes", err) } s.ShareInBytes = &value case float64: @@ -157,7 +158,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = &value case float64: @@ -168,7 +169,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case "total_virtual": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalVirtual", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -184,7 +185,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalVirtualInBytes", err) } s.TotalVirtualInBytes = &value case float64: @@ -199,7 +200,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedInBytes", err) } s.UsedInBytes = &value case float64: diff --git a/typedapi/types/memstats.go b/typedapi/types/memstats.go index 55df715818..fd5b280f3c 100644 --- a/typedapi/types/memstats.go +++ b/typedapi/types/memstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MemStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/get_memory_stats/types.ts#L65-L88 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/get_memory_stats/types.ts#L65-L88 type MemStats struct { // AdjustedTotal If the amount of physical memory has been overridden using the // es.total_memory_bytes system property @@ -67,7 +68,7 @@ func (s *MemStats) UnmarshalJSON(data []byte) error { case "adjusted_total": if err := dec.Decode(&s.AdjustedTotal); err != nil { - return err + return fmt.Errorf("%s | %w", "AdjustedTotal", err) } case "adjusted_total_in_bytes": @@ -78,7 +79,7 @@ func (s *MemStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AdjustedTotalInBytes", err) } s.AdjustedTotalInBytes = value case float64: @@ -88,12 +89,12 @@ func (s *MemStats) UnmarshalJSON(data []byte) error { case "ml": if err := dec.Decode(&s.Ml); err != nil { - return err + return fmt.Errorf("%s | %w", "Ml", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } case "total_in_bytes": @@ -104,7 +105,7 @@ func (s *MemStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = value case float64: diff --git a/typedapi/types/merge.go b/typedapi/types/merge.go index bcbc67cdfa..ff1b698e88 100644 --- a/typedapi/types/merge.go +++ b/typedapi/types/merge.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Merge type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L330-L332 type Merge struct { Scheduler *MergeScheduler `json:"scheduler,omitempty"` } diff --git a/typedapi/types/mergescheduler.go b/typedapi/types/mergescheduler.go index bd27f7ccd0..5a74293443 100644 --- a/typedapi/types/mergescheduler.go +++ b/typedapi/types/mergescheduler.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MergeScheduler type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L330-L333 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L334-L337 type MergeScheduler struct { MaxMergeCount Stringifiedinteger `json:"max_merge_count,omitempty"` MaxThreadCount Stringifiedinteger `json:"max_thread_count,omitempty"` @@ -52,12 +53,12 @@ func (s *MergeScheduler) UnmarshalJSON(data []byte) error { case "max_merge_count": if err := dec.Decode(&s.MaxMergeCount); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxMergeCount", err) } case "max_thread_count": if err := dec.Decode(&s.MaxThreadCount); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxThreadCount", err) } } diff --git a/typedapi/types/mergesstats.go b/typedapi/types/mergesstats.go index 57eb275262..bf065d0987 100644 --- a/typedapi/types/mergesstats.go +++ b/typedapi/types/mergesstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MergesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L161-L178 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L161-L178 type MergesStats struct { Current int64 `json:"current"` CurrentDocs int64 `json:"current_docs"` @@ -72,7 +73,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } s.Current = value case float64: @@ -87,7 +88,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentDocs", err) } s.CurrentDocs = value case float64: @@ -98,7 +99,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case "current_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -114,7 +115,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentSizeInBytes", err) } s.CurrentSizeInBytes = value case float64: @@ -129,7 +130,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -140,7 +141,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case "total_auto_throttle": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalAutoThrottle", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -156,7 +157,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalAutoThrottleInBytes", err) } s.TotalAutoThrottleInBytes = value case float64: @@ -171,7 +172,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDocs", err) } s.TotalDocs = value case float64: @@ -182,7 +183,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case "total_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -198,7 +199,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSizeInBytes", err) } s.TotalSizeInBytes = value case float64: @@ -208,32 +209,32 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { case "total_stopped_time": if err := dec.Decode(&s.TotalStoppedTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalStoppedTime", err) } case "total_stopped_time_in_millis": if err := dec.Decode(&s.TotalStoppedTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalStoppedTimeInMillis", err) } case "total_throttled_time": if err := dec.Decode(&s.TotalThrottledTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalThrottledTime", err) } case "total_throttled_time_in_millis": if err := dec.Decode(&s.TotalThrottledTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalThrottledTimeInMillis", err) } case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/metadata.go b/typedapi/types/metadata.go index 5cbc124bdb..9355112f49 100644 --- a/typedapi/types/metadata.go +++ b/typedapi/types/metadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ import ( // Metadata type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L99-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L99-L99 type Metadata map[string]json.RawMessage diff --git a/typedapi/types/metrics.go b/typedapi/types/metrics.go index 9e99df7702..f26313b27b 100644 --- a/typedapi/types/metrics.go +++ b/typedapi/types/metrics.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Metrics type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L76-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L76-L76 type Metrics []string diff --git a/typedapi/types/mgetoperation.go b/typedapi/types/mgetoperation.go index 6c4ff29171..bfdd087a3f 100644 --- a/typedapi/types/mgetoperation.go +++ b/typedapi/types/mgetoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" @@ -31,7 +32,7 @@ import ( // MgetOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mget/types.ts#L32-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mget/types.ts#L32-L55 type MgetOperation struct { // Id_ The unique document ID. Id_ string `json:"_id"` @@ -65,22 +66,22 @@ func (s *MgetOperation) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stored_fields": @@ -89,24 +90,24 @@ func (s *MgetOperation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/mgetresponseitem.go b/typedapi/types/mgetresponseitem.go index b2b2dad093..9658a09fdb 100644 --- a/typedapi/types/mgetresponseitem.go +++ b/typedapi/types/mgetresponseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // GetResult // MultiGetError // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mget/types.ts#L57-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mget/types.ts#L57-L60 type MgetResponseItem interface{} diff --git a/typedapi/types/migrationfeatureindexinfo.go b/typedapi/types/migrationfeatureindexinfo.go index 5711081888..63ccd350a9 100644 --- a/typedapi/types/migrationfeatureindexinfo.go +++ b/typedapi/types/migrationfeatureindexinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MigrationFeatureIndexInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L44-L48 type MigrationFeatureIndexInfo struct { FailureCause *ErrorCause `json:"failure_cause,omitempty"` Index string `json:"index"` @@ -53,17 +54,17 @@ func (s *MigrationFeatureIndexInfo) UnmarshalJSON(data []byte) error { case "failure_cause": if err := dec.Decode(&s.FailureCause); err != nil { - return err + return fmt.Errorf("%s | %w", "FailureCause", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/minaggregate.go b/typedapi/types/minaggregate.go index c63b9d7be8..5fb523f784 100644 --- a/typedapi/types/minaggregate.go +++ b/typedapi/types/minaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MinAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L197-L198 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L197-L198 type MinAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. 
A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *MinAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/minaggregation.go b/typedapi/types/minaggregation.go index 2ff95c371e..3e9ee9f15c 100644 --- a/typedapi/types/minaggregation.go +++ b/typedapi/types/minaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MinAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L172-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L172-L172 type MinAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *MinAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *MinAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *MinAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *MinAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *MinAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/minbucketaggregation.go b/typedapi/types/minbucketaggregation.go index cb75ef0b4c..15dc550021 100644 --- a/typedapi/types/minbucketaggregation.go +++ b/typedapi/types/minbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MinBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L226-L226 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L226-L226 type MinBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/minimallicenseinformation.go b/typedapi/types/minimallicenseinformation.go index 993daf355e..28d44236fc 100644 --- a/typedapi/types/minimallicenseinformation.go +++ b/typedapi/types/minimallicenseinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // MinimalLicenseInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/info/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/info/types.ts#L34-L40 type MinimalLicenseInformation struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` Mode licensetype.LicenseType `json:"mode"` @@ -59,28 +60,28 @@ func (s *MinimalLicenseInformation) UnmarshalJSON(data []byte) error { case "expiry_date_in_millis": if err := dec.Decode(&s.ExpiryDateInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpiryDateInMillis", err) } case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "uid": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Uid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/minimumshouldmatch.go b/typedapi/types/minimumshouldmatch.go index 9bd5ee3ddc..f8f372226e 100644 --- a/typedapi/types/minimumshouldmatch.go +++ b/typedapi/types/minimumshouldmatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L163-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L163-L167 type MinimumShouldMatch interface{} diff --git a/typedapi/types/missing.go b/typedapi/types/missing.go index 1d887c21bb..5a24bf9893 100644 --- a/typedapi/types/missing.go +++ b/typedapi/types/missing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -27,5 +27,5 @@ package types // Float64 // bool // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/AggregationContainer.ts#L517-L517 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/AggregationContainer.ts#L517-L517 type Missing interface{} diff --git a/typedapi/types/missingaggregate.go b/typedapi/types/missingaggregate.go index c2b33d017e..de5b123649 100644 --- a/typedapi/types/missingaggregate.go +++ b/typedapi/types/missingaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // MissingAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L483-L484 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L483-L484 type MissingAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := 
NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != 
nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/missingaggregation.go b/typedapi/types/missingaggregation.go index 6d98fab994..07afc1811c 100644 --- a/typedapi/types/missingaggregation.go +++ b/typedapi/types/missingaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MissingAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L574-L580 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L574-L580 type MissingAggregation struct { // Field The name of the field. Field *string `json:"field,omitempty"` @@ -56,23 +57,23 @@ func (s *MissingAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/mlcounter.go b/typedapi/types/mlcounter.go index 80f55d6394..d7b20317ef 100644 --- a/typedapi/types/mlcounter.go +++ b/typedapi/types/mlcounter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlCounter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L255-L257 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L255-L257 type MlCounter struct { Count int64 `json:"count"` } @@ -57,7 +58,7 @@ func (s *MlCounter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: diff --git a/typedapi/types/mldatafeed.go b/typedapi/types/mldatafeed.go index 24a2218fb2..935db43180 100644 --- a/typedapi/types/mldatafeed.go +++ b/typedapi/types/mldatafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MLDatafeed type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L37-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L37-L58 type MLDatafeed struct { Aggregations map[string]Aggregations `json:"aggregations,omitempty"` // Authorization The security privileges that the datafeed uses to run its queries. If Elastic @@ -73,52 +74,52 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "authorization": if err := dec.Decode(&s.Authorization); err != nil { - return err + return fmt.Errorf("%s | %w", "Authorization", err) } case "chunking_config": if err := dec.Decode(&s.ChunkingConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "ChunkingConfig", err) } case "datafeed_id": if err := dec.Decode(&s.DatafeedId); err != nil { - return err + return fmt.Errorf("%s | %w", "DatafeedId", err) } case "delayed_data_check_config": if err := dec.Decode(&s.DelayedDataCheckConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "DelayedDataCheckConfig", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "indexes": if err := dec.Decode(&s.Indexes); err != nil { - return err + return fmt.Errorf("%s | %w", "Indexes", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "indices_options": if err := dec.Decode(&s.IndicesOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesOptions", err) } case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "max_empty_searches": @@ -129,7 +130,7 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxEmptySearches", err) } s.MaxEmptySearches = &value case float64: @@ -139,17 +140,17 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "query_delay": if err := dec.Decode(&s.QueryDelay); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryDelay", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -157,7 +158,7 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "scroll_size": @@ -168,7 +169,7 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollSize", 
err) } s.ScrollSize = &value case float64: diff --git a/typedapi/types/mldataframeanalyticsjobs.go b/typedapi/types/mldataframeanalyticsjobs.go index 7cf7ac08db..74e6d61045 100644 --- a/typedapi/types/mldataframeanalyticsjobs.go +++ b/typedapi/types/mldataframeanalyticsjobs.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MlDataFrameAnalyticsJobs type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L177-L182 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L177-L182 type MlDataFrameAnalyticsJobs struct { All_ MlDataFrameAnalyticsJobsCount `json:"_all"` AnalysisCounts *MlDataFrameAnalyticsJobsAnalysis `json:"analysis_counts,omitempty"` diff --git a/typedapi/types/mldataframeanalyticsjobsanalysis.go b/typedapi/types/mldataframeanalyticsjobsanalysis.go index 61d38c3842..dcf3747538 100644 --- a/typedapi/types/mldataframeanalyticsjobsanalysis.go +++ b/typedapi/types/mldataframeanalyticsjobsanalysis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlDataFrameAnalyticsJobsAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L184-L188 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L184-L188 type MlDataFrameAnalyticsJobsAnalysis struct { Classification *int `json:"classification,omitempty"` OutlierDetection *int `json:"outlier_detection,omitempty"` @@ -60,7 +61,7 @@ func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Classification", err) } s.Classification = &value case float64: @@ -76,7 +77,7 @@ func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutlierDetection", err) } s.OutlierDetection = &value case float64: @@ -92,7 +93,7 @@ func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Regression", err) } s.Regression = &value case float64: diff --git a/typedapi/types/mldataframeanalyticsjobscount.go b/typedapi/types/mldataframeanalyticsjobscount.go index 46dff2ab13..94cfa4d653 100644 --- a/typedapi/types/mldataframeanalyticsjobscount.go +++ b/typedapi/types/mldataframeanalyticsjobscount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlDataFrameAnalyticsJobsCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L194-L196 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L194-L196 type MlDataFrameAnalyticsJobsCount struct { Count int64 `json:"count"` } @@ -57,7 +58,7 @@ func (s *MlDataFrameAnalyticsJobsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: diff --git a/typedapi/types/mldataframeanalyticsjobsmemory.go b/typedapi/types/mldataframeanalyticsjobsmemory.go index 1fdd40830f..1debb13710 100644 --- a/typedapi/types/mldataframeanalyticsjobsmemory.go +++ b/typedapi/types/mldataframeanalyticsjobsmemory.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MlDataFrameAnalyticsJobsMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L190-L192 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L190-L192 type MlDataFrameAnalyticsJobsMemory struct { PeakUsageBytes JobStatistics `json:"peak_usage_bytes"` } diff --git a/typedapi/types/mlfilter.go b/typedapi/types/mlfilter.go index e9270b0ecb..59cb1c2b53 100644 --- a/typedapi/types/mlfilter.go +++ b/typedapi/types/mlfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MLFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Filter.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Filter.ts#L22-L29 type MLFilter struct { // Description A description of the filter. 
Description *string `json:"description,omitempty"` @@ -58,7 +59,7 @@ func (s *MLFilter) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,12 +70,12 @@ func (s *MLFilter) UnmarshalJSON(data []byte) error { case "filter_id": if err := dec.Decode(&s.FilterId); err != nil { - return err + return fmt.Errorf("%s | %w", "FilterId", err) } case "items": if err := dec.Decode(&s.Items); err != nil { - return err + return fmt.Errorf("%s | %w", "Items", err) } } diff --git a/typedapi/types/mlinference.go b/typedapi/types/mlinference.go index ce48018f38..01b553db42 100644 --- a/typedapi/types/mlinference.go +++ b/typedapi/types/mlinference.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MlInference type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L198-L206 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L198-L206 type MlInference struct { Deployments *MlInferenceDeployments `json:"deployments,omitempty"` IngestProcessors map[string]MlInferenceIngestProcessor `json:"ingest_processors"` diff --git a/typedapi/types/mlinferencedeployments.go b/typedapi/types/mlinferencedeployments.go index e26d871505..6efc3f86bd 100644 --- a/typedapi/types/mlinferencedeployments.go +++ b/typedapi/types/mlinferencedeployments.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlInferenceDeployments type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L227-L232 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L227-L232 type MlInferenceDeployments struct { Count int `json:"count"` InferenceCounts JobStatistics `json:"inference_counts"` @@ -61,7 +62,7 @@ func (s *MlInferenceDeployments) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -71,17 +72,17 @@ func (s *MlInferenceDeployments) UnmarshalJSON(data []byte) error { case "inference_counts": if err := dec.Decode(&s.InferenceCounts); err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceCounts", err) } case "model_sizes_bytes": if err := dec.Decode(&s.ModelSizesBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSizesBytes", err) } case "time_ms": if err := dec.Decode(&s.TimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeMs", err) } } diff --git a/typedapi/types/mlinferencedeploymentstimems.go b/typedapi/types/mlinferencedeploymentstimems.go index 2ffc34ca6e..01bb4b9666 100644 --- a/typedapi/types/mlinferencedeploymentstimems.go +++ b/typedapi/types/mlinferencedeploymentstimems.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlInferenceDeploymentsTimeMs type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L234-L236 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L234-L236 type MlInferenceDeploymentsTimeMs struct { Avg Float64 `json:"avg"` } @@ -57,7 +58,7 @@ func (s *MlInferenceDeploymentsTimeMs) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } f := Float64(value) s.Avg = f diff --git a/typedapi/types/mlinferenceingestprocessor.go b/typedapi/types/mlinferenceingestprocessor.go index 32de288542..f1a1195375 100644 --- a/typedapi/types/mlinferenceingestprocessor.go +++ b/typedapi/types/mlinferenceingestprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MlInferenceIngestProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L208-L213 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L208-L213 type MlInferenceIngestProcessor struct { NumDocsProcessed MlInferenceIngestProcessorCount `json:"num_docs_processed"` NumFailures MlInferenceIngestProcessorCount `json:"num_failures"` diff --git a/typedapi/types/mlinferenceingestprocessorcount.go b/typedapi/types/mlinferenceingestprocessorcount.go index eb57702783..225075a09c 100644 --- a/typedapi/types/mlinferenceingestprocessorcount.go +++ b/typedapi/types/mlinferenceingestprocessorcount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlInferenceIngestProcessorCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L238-L242 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L238-L242 type MlInferenceIngestProcessorCount struct { Max int64 `json:"max"` Min int64 `json:"min"` @@ -59,7 +60,7 @@ func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } s.Max = value case float64: @@ -74,7 +75,7 @@ func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } s.Min = value case float64: @@ -89,7 +90,7 @@ func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sum", err) } s.Sum = value case float64: diff --git a/typedapi/types/mlinferencetrainedmodels.go b/typedapi/types/mlinferencetrainedmodels.go index ab2670ff15..799ae0e9c9 100644 --- a/typedapi/types/mlinferencetrainedmodels.go +++ b/typedapi/types/mlinferencetrainedmodels.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // MlInferenceTrainedModels type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L215-L225 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L215-L225 type MlInferenceTrainedModels struct { All_ MlCounter `json:"_all"` Count *MlInferenceTrainedModelsCount `json:"count,omitempty"` diff --git a/typedapi/types/mlinferencetrainedmodelscount.go b/typedapi/types/mlinferencetrainedmodelscount.go index c4fda46595..9067f4b8d4 100644 --- a/typedapi/types/mlinferencetrainedmodelscount.go +++ b/typedapi/types/mlinferencetrainedmodelscount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlInferenceTrainedModelsCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L244-L253 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L244-L253 type MlInferenceTrainedModelsCount struct { Classification *int64 `json:"classification,omitempty"` Ner *int64 `json:"ner,omitempty"` @@ -64,7 +65,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Classification", err) } s.Classification = &value case float64: @@ -79,7 +80,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Ner", err) } s.Ner = &value case float64: @@ -94,7 +95,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Other", err) } s.Other = value case float64: @@ -109,7 +110,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PassThrough", err) } s.PassThrough = &value case float64: @@ -124,7 +125,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Prepackaged", err) } s.Prepackaged = value case float64: @@ -139,7 +140,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Regression", err) } s.Regression = &value case float64: @@ -154,7 +155,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TextEmbedding", err) } s.TextEmbedding = &value case float64: @@ -169,7 +170,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { case string: value, 
err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/mljobforecasts.go b/typedapi/types/mljobforecasts.go index 377c50114e..983c8813c0 100644 --- a/typedapi/types/mljobforecasts.go +++ b/typedapi/types/mljobforecasts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MlJobForecasts type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L172-L175 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L172-L175 type MlJobForecasts struct { ForecastedJobs int64 `json:"forecasted_jobs"` Total int64 `json:"total"` @@ -58,7 +59,7 @@ func (s *MlJobForecasts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ForecastedJobs", err) } s.ForecastedJobs = value case float64: @@ -73,7 +74,7 @@ func (s *MlJobForecasts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/modelconfig.go b/typedapi/types/modelconfig.go index 3adeab81b6..9af1294c0b 100644 --- a/typedapi/types/modelconfig.go +++ b/typedapi/types/modelconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ModelConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Services.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Services.ts#L23-L39 type ModelConfig struct { // Service The service type Service string `json:"service"` @@ -58,7 +59,7 @@ func (s *ModelConfig) UnmarshalJSON(data []byte) error { case "service": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Service", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,12 +70,12 @@ func (s *ModelConfig) UnmarshalJSON(data []byte) error { case "service_settings": if err := dec.Decode(&s.ServiceSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "ServiceSettings", err) } case "task_settings": if err := dec.Decode(&s.TaskSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskSettings", err) } } diff --git a/typedapi/types/modelconfigcontainer.go b/typedapi/types/modelconfigcontainer.go index bd15b39558..19a16a6384 100644 --- a/typedapi/types/modelconfigcontainer.go +++ b/typedapi/types/modelconfigcontainer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ModelConfigContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Services.ts#L41-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Services.ts#L41-L53 type ModelConfigContainer struct { // ModelId The model Id ModelId string `json:"model_id"` @@ -64,7 +65,7 @@ func (s *ModelConfigContainer) UnmarshalJSON(data []byte) error { case "model_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *ModelConfigContainer) UnmarshalJSON(data []byte) error { case "service": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Service", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,17 +88,17 @@ func (s *ModelConfigContainer) UnmarshalJSON(data []byte) error { case "service_settings": if err := dec.Decode(&s.ServiceSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "ServiceSettings", err) } case "task_settings": if err := dec.Decode(&s.TaskSettings); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskSettings", err) } case "task_type": if err := dec.Decode(&s.TaskType); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskType", err) } } diff --git a/typedapi/types/modelplotconfig.go b/typedapi/types/modelplotconfig.go index 2c3131b9b9..dc334362c2 100644 --- a/typedapi/types/modelplotconfig.go +++ b/typedapi/types/modelplotconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ModelPlotConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/ModelPlot.ts#L23-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/ModelPlot.ts#L23-L42 type ModelPlotConfig struct { // AnnotationsEnabled If true, enables calculation and storage of the model change annotations for // each entity that is being analyzed. @@ -67,7 +68,7 @@ func (s *ModelPlotConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnnotationsEnabled", err) } s.AnnotationsEnabled = &value case bool: @@ -81,7 +82,7 @@ func (s *ModelPlotConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -90,7 +91,7 @@ func (s *ModelPlotConfig) UnmarshalJSON(data []byte) error { case "terms": if err := dec.Decode(&s.Terms); err != nil { - return err + return fmt.Errorf("%s | %w", "Terms", err) } } diff --git a/typedapi/types/modelsizestats.go b/typedapi/types/modelsizestats.go index 252e5b676f..7ff35d18e2 100644 --- a/typedapi/types/modelsizestats.go +++ b/typedapi/types/modelsizestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // ModelSizeStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Model.ts#L59-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Model.ts#L59-L81 type ModelSizeStats struct { AssignmentMemoryBasis *string `json:"assignment_memory_basis,omitempty"` BucketAllocationFailuresCount int64 `json:"bucket_allocation_failures_count"` @@ -76,7 +77,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "assignment_memory_basis": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentMemoryBasis", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BucketAllocationFailuresCount", err) } s.BucketAllocationFailuresCount = value case float64: @@ -102,7 +103,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "categorization_status": if err := dec.Decode(&s.CategorizationStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizationStatus", err) } case "categorized_doc_count": @@ -113,7 +114,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CategorizedDocCount", err) } s.CategorizedDocCount = value case float64: @@ -129,7 +130,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeadCategoryCount", err) } s.DeadCategoryCount = value case float64: @@ -145,7 +146,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedCategoryCount", err) } s.FailedCategoryCount = value case float64: @@ -161,7 +162,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FrequentCategoryCount", err) } s.FrequentCategoryCount = value case float64: @@ -171,37 +172,37 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "log_time": if err := dec.Decode(&s.LogTime); err != nil { - return err + return fmt.Errorf("%s | %w", "LogTime", err) } case "memory_status": if err := dec.Decode(&s.MemoryStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryStatus", err) } case "model_bytes": if err := dec.Decode(&s.ModelBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBytes", err) } case "model_bytes_exceeded": if err := dec.Decode(&s.ModelBytesExceeded); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBytesExceeded", err) } case "model_bytes_memory_limit": if err := dec.Decode(&s.ModelBytesMemoryLimit); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBytesMemoryLimit", err) } case "peak_model_bytes": if err := dec.Decode(&s.PeakModelBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "PeakModelBytes", err) } case "rare_category_count": @@ -212,7 +213,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { 
case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RareCategoryCount", err) } s.RareCategoryCount = value case float64: @@ -223,7 +224,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "result_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -239,7 +240,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = &value case float64: @@ -254,7 +255,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalByFieldCount", err) } s.TotalByFieldCount = value case float64: @@ -270,7 +271,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCategoryCount", err) } s.TotalCategoryCount = value case float64: @@ -285,7 +286,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalOverFieldCount", err) } s.TotalOverFieldCount = value case float64: @@ -300,7 +301,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalPartitionFieldCount", err) } s.TotalPartitionFieldCount = value case float64: diff --git a/typedapi/types/modelsnapshot.go b/typedapi/types/modelsnapshot.go index b1aa7e2d5a..5acaa3caaf 100644 --- a/typedapi/types/modelsnapshot.go +++ b/typedapi/types/modelsnapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ModelSnapshot type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Model.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Model.ts#L25-L46 type ModelSnapshot struct { // Description An optional description of the job. 
Description *string `json:"description,omitempty"` @@ -75,7 +76,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "latest_record_time_stamp": @@ -97,7 +98,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LatestRecordTimeStamp", err) } s.LatestRecordTimeStamp = &value case float64: @@ -113,7 +114,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LatestResultTimeStamp", err) } s.LatestResultTimeStamp = &value case float64: @@ -123,12 +124,12 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case "min_version": if err := dec.Decode(&s.MinVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "MinVersion", err) } case "model_size_stats": if err := dec.Decode(&s.ModelSizeStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSizeStats", err) } case "retain": @@ -138,7 +139,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Retain", err) } s.Retain = value case bool: @@ -152,7 +153,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SnapshotDocCount", err) } s.SnapshotDocCount = value case float64: @@ -162,7 +163,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case "snapshot_id": if err := dec.Decode(&s.SnapshotId); err != nil { - return err + return fmt.Errorf("%s | %w", "SnapshotId", err) } case "timestamp": @@ -172,7 +173,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = value case float64: diff --git a/typedapi/types/modelsnapshotupgrade.go b/typedapi/types/modelsnapshotupgrade.go index 1481b2fcf1..339153c231 100644 --- a/typedapi/types/modelsnapshotupgrade.go +++ b/typedapi/types/modelsnapshotupgrade.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ModelSnapshotUpgrade type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Model.ts#L48-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Model.ts#L48-L57 type ModelSnapshotUpgrade struct { AssignmentExplanation string `json:"assignment_explanation"` JobId string `json:"job_id"` @@ -59,7 +60,7 @@ func (s *ModelSnapshotUpgrade) UnmarshalJSON(data []byte) error { case "assignment_explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentExplanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,22 +71,22 @@ func (s *ModelSnapshotUpgrade) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "snapshot_id": if err := dec.Decode(&s.SnapshotId); err != nil { - return err + return fmt.Errorf("%s | %w", "SnapshotId", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } } diff --git a/typedapi/types/monitoring.go b/typedapi/types/monitoring.go index ee35357201..e1f71dc60c 100644 --- a/typedapi/types/monitoring.go +++ b/typedapi/types/monitoring.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Monitoring type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L381-L384 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L381-L384 type Monitoring struct { Available bool `json:"available"` CollectionEnabled bool `json:"collection_enabled"` @@ -60,7 +61,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -74,7 +75,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollectionEnabled", err) } s.CollectionEnabled = value case bool: @@ -88,7 +89,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -100,7 +101,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { s.EnabledExporters = make(map[string]int64, 0) } if err := dec.Decode(&s.EnabledExporters); err != nil { - return err + return fmt.Errorf("%s | %w", "EnabledExporters", err) } } diff --git a/typedapi/types/morelikethisquery.go b/typedapi/types/morelikethisquery.go index 80d56aa920..80ae85e8c4 100644 --- a/typedapi/types/morelikethisquery.go +++ b/typedapi/types/morelikethisquery.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MoreLikeThisQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L78-L163 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L78-L163 type MoreLikeThisQuery struct { // Analyzer The analyzer that is used to analyze the free form text. // Defaults to the analyzer associated with the first field in fields. @@ -111,7 +112,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,7 +128,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -143,7 +144,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BoostTerms", err) } f := Float64(value) s.BoostTerms = &f @@ -159,7 +160,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailOnUnsupportedField", err) } s.FailOnUnsupportedField = &value case bool: @@ -168,7 +169,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "include": @@ -178,7 +179,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } s.Include = &value case bool: @@ -191,13 +192,13 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(Like) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Like", err) } s.Like = append(s.Like, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Like); err != nil { - return err + return fmt.Errorf("%s | %w", "Like", err) } } @@ -209,7 +210,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocFreq", err) } s.MaxDocFreq = &value case float64: @@ -225,7 +226,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxQueryTerms", err) } s.MaxQueryTerms = &value case float64: @@ -241,7 +242,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return 
fmt.Errorf("%s | %w", "MaxWordLength", err) } s.MaxWordLength = &value case float64: @@ -257,7 +258,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocFreq", err) } s.MinDocFreq = &value case float64: @@ -273,7 +274,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinTermFreq", err) } s.MinTermFreq = &value case float64: @@ -289,7 +290,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinWordLength", err) } s.MinWordLength = &value case float64: @@ -299,7 +300,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "per_field_analyzer": @@ -307,13 +308,13 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { s.PerFieldAnalyzer = make(map[string]string, 0) } if err := dec.Decode(&s.PerFieldAnalyzer); err != nil { - return err + return fmt.Errorf("%s | %w", "PerFieldAnalyzer", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -324,7 +325,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "stop_words": @@ -333,13 +334,13 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StopWords", err) } s.StopWords = append(s.StopWords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StopWords); err != nil { - return err + return fmt.Errorf("%s | %w", "StopWords", err) } } @@ -349,24 +350,24 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(Like) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Unlike", err) } s.Unlike = append(s.Unlike, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Unlike); err != nil { - return err + return fmt.Errorf("%s | %w", "Unlike", err) } } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/mountedsnapshot.go b/typedapi/types/mountedsnapshot.go index 0ba8218de7..b89b0ff02d 100644 --- a/typedapi/types/mountedsnapshot.go +++ b/typedapi/types/mountedsnapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MountedSnapshot type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/mount/types.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/mount/types.ts#L23-L27 type MountedSnapshot struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` @@ -57,24 +58,24 @@ func (s *MountedSnapshot) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "shards": if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "snapshot": if err := dec.Decode(&s.Snapshot); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshot", err) } } diff --git a/typedapi/types/movingaverageaggregation.go b/typedapi/types/movingaverageaggregation.go index 39285d8698..0433bd0a0d 100644 --- a/typedapi/types/movingaverageaggregation.go +++ b/typedapi/types/movingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -28,5 +28,5 @@ package types // HoltMovingAverageAggregation // HoltWintersMovingAverageAggregation // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L228-L234 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L228-L234 type MovingAverageAggregation interface{} diff --git a/typedapi/types/movingfunctionaggregation.go b/typedapi/types/movingfunctionaggregation.go index 59ae166a28..48840abec4 100644 --- a/typedapi/types/movingfunctionaggregation.go +++ b/typedapi/types/movingfunctionaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MovingFunctionAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L288-L303 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L288-L303 type MovingFunctionAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -72,13 +73,13 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,18 +90,18 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,7 +113,7 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case "script": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,7 +130,7 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shift", err) } s.Shift = &value case float64: @@ -145,7 +146,7 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/movingpercentilesaggregation.go b/typedapi/types/movingpercentilesaggregation.go index b1a9eddfca..3446d72d52 100644 --- a/typedapi/types/movingpercentilesaggregation.go +++ b/typedapi/types/movingpercentilesaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MovingPercentilesAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L305-L317 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L305-L317 type MovingPercentilesAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -71,13 +72,13 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "keyed": @@ -98,7 +99,7 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -107,13 +108,13 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shift", err) } s.Shift = &value case float64: @@ -146,7 +147,7 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/msearchrequestitem.go b/typedapi/types/msearchrequestitem.go index dc766a018f..49e1b4737b 100644 --- a/typedapi/types/msearchrequestitem.go +++ b/typedapi/types/msearchrequestitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // MultisearchHeader // MultisearchBody // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/types.ts#L48-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/types.ts#L48-L51 type MsearchRequestItem interface{} diff --git a/typedapi/types/msearchresponseitem.go b/typedapi/types/msearchresponseitem.go index 7671322e71..8004a1c033 100644 --- a/typedapi/types/msearchresponseitem.go +++ b/typedapi/types/msearchresponseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // MultiSearchItem // ErrorResponseBase // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/types.ts#L209-L212 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/types.ts#L209-L212 type MsearchResponseItem interface{} diff --git a/typedapi/types/mtermvectorsoperation.go b/typedapi/types/mtermvectorsoperation.go index a3206d562d..d3435b7911 100644 --- a/typedapi/types/mtermvectorsoperation.go +++ b/typedapi/types/mtermvectorsoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // MTermVectorsOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mtermvectors/types.ts#L35-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mtermvectors/types.ts#L35-L94 type MTermVectorsOperation struct { // Doc An artificial document (a document not present in the index) for which you // want to retrieve term vectors. 
@@ -84,7 +85,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case "doc": if err := dec.Decode(&s.Doc); err != nil { - return err + return fmt.Errorf("%s | %w", "Doc", err) } case "field_statistics": @@ -94,7 +95,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FieldStatistics", err) } s.FieldStatistics = &value case bool: @@ -107,29 +108,29 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "offsets": @@ -139,7 +140,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offsets", err) } s.Offsets = &value case bool: @@ -153,7 +154,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Payloads", err) } s.Payloads = &value case bool: @@ -167,7 +168,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Positions", err) } s.Positions = &value case bool: @@ -176,7 +177,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "term_statistics": @@ -186,7 +187,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TermStatistics", err) } s.TermStatistics = &value case bool: @@ -195,12 +196,12 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go b/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go index 343b8d9980..0d6cb5f7c9 100644 --- a/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go +++ b/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseAdjacencyMatrixBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseAdjacencyMatrixBucket struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseAdjacencyMatrixBucket) UnmarshalJSON(data []byt case '{': o := make(map[string]AdjacencyMatrixBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []AdjacencyMatrixBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasecompositebucket.go b/typedapi/types/multibucketaggregatebasecompositebucket.go index ee0c49f2c2..964b2c8662 100644 --- a/typedapi/types/multibucketaggregatebasecompositebucket.go +++ b/typedapi/types/multibucketaggregatebasecompositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseCompositeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseCompositeBucket struct { Buckets BucketsCompositeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseCompositeBucket) UnmarshalJSON(data []byte) err case '{': o := make(map[string]CompositeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []CompositeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasedatehistogrambucket.go b/typedapi/types/multibucketaggregatebasedatehistogrambucket.go index a8551f85ac..453514aa4b 100644 --- a/typedapi/types/multibucketaggregatebasedatehistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasedatehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseDateHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseDateHistogramBucket struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseDateHistogramBucket) UnmarshalJSON(data []byte) case '{': o := make(map[string]DateHistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []DateHistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasedoubletermsbucket.go b/typedapi/types/multibucketaggregatebasedoubletermsbucket.go index 186a163f0a..74e533e872 100644 --- a/typedapi/types/multibucketaggregatebasedoubletermsbucket.go +++ b/typedapi/types/multibucketaggregatebasedoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseDoubleTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseDoubleTermsBucket struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) e case '{': o := make(map[string]DoubleTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []DoubleTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasefiltersbucket.go b/typedapi/types/multibucketaggregatebasefiltersbucket.go index 083f5b94a4..01b71418f3 100644 --- a/typedapi/types/multibucketaggregatebasefiltersbucket.go +++ b/typedapi/types/multibucketaggregatebasefiltersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseFiltersBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseFiltersBucket struct { Buckets BucketsFiltersBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseFiltersBucket) UnmarshalJSON(data []byte) error case '{': o := make(map[string]FiltersBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []FiltersBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go b/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go index 700c768b2d..a9527bfe7e 100644 --- a/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go +++ b/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseFrequentItemSetsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseFrequentItemSetsBucket struct { Buckets BucketsFrequentItemSetsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseFrequentItemSetsBucket) UnmarshalJSON(data []by case '{': o := make(map[string]FrequentItemSetsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []FrequentItemSetsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasegeohashgridbucket.go b/typedapi/types/multibucketaggregatebasegeohashgridbucket.go index 24ad08112b..df47145194 100644 --- a/typedapi/types/multibucketaggregatebasegeohashgridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseGeoHashGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoHashGridBucket struct { Buckets BucketsGeoHashGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseGeoHashGridBucket) UnmarshalJSON(data []byte) e case '{': o := make(map[string]GeoHashGridBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []GeoHashGridBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasegeohexgridbucket.go b/typedapi/types/multibucketaggregatebasegeohexgridbucket.go index 5d27872224..38a5c95c6a 100644 --- a/typedapi/types/multibucketaggregatebasegeohexgridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseGeoHexGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoHexGridBucket struct { Buckets BucketsGeoHexGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseGeoHexGridBucket) UnmarshalJSON(data []byte) er case '{': o := make(map[string]GeoHexGridBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []GeoHexGridBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasegeotilegridbucket.go b/typedapi/types/multibucketaggregatebasegeotilegridbucket.go index afd58c4c9f..3036188e73 100644 --- a/typedapi/types/multibucketaggregatebasegeotilegridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseGeoTileGridBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoTileGridBucket struct { Buckets BucketsGeoTileGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseGeoTileGridBucket) UnmarshalJSON(data []byte) e case '{': o := make(map[string]GeoTileGridBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []GeoTileGridBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasehistogrambucket.go b/typedapi/types/multibucketaggregatebasehistogrambucket.go index baa61a6761..fdd33d6ccd 100644 --- a/typedapi/types/multibucketaggregatebasehistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseHistogramBucket struct { Buckets BucketsHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseHistogramBucket) UnmarshalJSON(data []byte) err case '{': o := make(map[string]HistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []HistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebaseipprefixbucket.go b/typedapi/types/multibucketaggregatebaseipprefixbucket.go index 29a4f7ac26..5c4b63d0f5 100644 --- a/typedapi/types/multibucketaggregatebaseipprefixbucket.go +++ b/typedapi/types/multibucketaggregatebaseipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseIpPrefixBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseIpPrefixBucket struct { Buckets BucketsIpPrefixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseIpPrefixBucket) UnmarshalJSON(data []byte) erro case '{': o := make(map[string]IpPrefixBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []IpPrefixBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebaseiprangebucket.go b/typedapi/types/multibucketaggregatebaseiprangebucket.go index a91206be7f..ea40cb09eb 100644 --- a/typedapi/types/multibucketaggregatebaseiprangebucket.go +++ b/typedapi/types/multibucketaggregatebaseiprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseIpRangeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseIpRangeBucket struct { Buckets BucketsIpRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseIpRangeBucket) UnmarshalJSON(data []byte) error case '{': o := make(map[string]IpRangeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []IpRangeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebaselongraretermsbucket.go b/typedapi/types/multibucketaggregatebaselongraretermsbucket.go index 6592337769..5ae911a43a 100644 --- a/typedapi/types/multibucketaggregatebaselongraretermsbucket.go +++ b/typedapi/types/multibucketaggregatebaselongraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseLongRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseLongRareTermsBucket struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseLongRareTermsBucket) UnmarshalJSON(data []byte) case '{': o := make(map[string]LongRareTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []LongRareTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebaselongtermsbucket.go b/typedapi/types/multibucketaggregatebaselongtermsbucket.go index d42cd63017..9d19535b7d 100644 --- a/typedapi/types/multibucketaggregatebaselongtermsbucket.go +++ b/typedapi/types/multibucketaggregatebaselongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseLongTermsBucket struct { Buckets BucketsLongTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) err case '{': o := make(map[string]LongTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []LongTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasemultitermsbucket.go b/typedapi/types/multibucketaggregatebasemultitermsbucket.go index fee5d1f7e7..05af464b59 100644 --- a/typedapi/types/multibucketaggregatebasemultitermsbucket.go +++ b/typedapi/types/multibucketaggregatebasemultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseMultiTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseMultiTermsBucket struct { Buckets BucketsMultiTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) er case '{': o := make(map[string]MultiTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []MultiTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebaserangebucket.go b/typedapi/types/multibucketaggregatebaserangebucket.go index c8269fba49..64d1b5b6b9 100644 --- a/typedapi/types/multibucketaggregatebaserangebucket.go +++ b/typedapi/types/multibucketaggregatebaserangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseRangeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseRangeBucket struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseRangeBucket) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]RangeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []RangeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go b/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go index 444595e24c..85b29ae891 100644 --- a/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseSignificantLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseSignificantLongTermsBucket struct { Buckets BucketsSignificantLongTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseSignificantLongTermsBucket) UnmarshalJSON(data case '{': o := make(map[string]SignificantLongTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []SignificantLongTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go b/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go index f9c2a2d72b..2e6cfac438 100644 --- a/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseSignificantStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseSignificantStringTermsBucket struct { Buckets BucketsSignificantStringTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseSignificantStringTermsBucket) UnmarshalJSON(dat case '{': o := make(map[string]SignificantStringTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []SignificantStringTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasestringraretermsbucket.go b/typedapi/types/multibucketaggregatebasestringraretermsbucket.go index 2be35e85e9..84e7602502 100644 --- a/typedapi/types/multibucketaggregatebasestringraretermsbucket.go +++ b/typedapi/types/multibucketaggregatebasestringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseStringRareTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseStringRareTermsBucket struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseStringRareTermsBucket) UnmarshalJSON(data []byt case '{': o := make(map[string]StringRareTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []StringRareTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasestringtermsbucket.go b/typedapi/types/multibucketaggregatebasestringtermsbucket.go index 9de9ba9c0d..f3e015d624 100644 --- a/typedapi/types/multibucketaggregatebasestringtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasestringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseStringTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseStringTermsBucket struct { Buckets BucketsStringTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) e case '{': o := make(map[string]StringTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []StringTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go b/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go index dcfe510c9e..0cea28859f 100644 --- a/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseVariableWidthHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseVariableWidthHistogramBucket struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseVariableWidthHistogramBucket) UnmarshalJSON(dat case '{': o := make(map[string]VariableWidthHistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []VariableWidthHistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multibucketaggregatebasevoid.go b/typedapi/types/multibucketaggregatebasevoid.go index 84bf7c0db4..7672cc678a 100644 --- a/typedapi/types/multibucketaggregatebasevoid.go +++ b/typedapi/types/multibucketaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiBucketAggregateBaseVoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseVoid struct { Buckets BucketsVoid `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *MultiBucketAggregateBaseVoid) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]interface{}, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []interface{}{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/multigeterror.go b/typedapi/types/multigeterror.go index e076e96efe..121f8797e6 100644 --- a/typedapi/types/multigeterror.go +++ b/typedapi/types/multigeterror.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiGetError type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mget/types.ts#L62-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mget/types.ts#L62-L66 type MultiGetError struct { Error ErrorCause `json:"error"` Id_ string `json:"_id"` @@ -53,17 +54,17 @@ func (s *MultiGetError) UnmarshalJSON(data []byte) error { case "error": if err := dec.Decode(&s.Error); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } } diff --git a/typedapi/types/multimatchquery.go b/typedapi/types/multimatchquery.go index 484117c1e9..f11b9afaae 100644 --- a/typedapi/types/multimatchquery.go +++ b/typedapi/types/multimatchquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // MultiMatchQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L456-L539 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L456-L539 type MultiMatchQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. Analyzer *string `json:"analyzer,omitempty"` @@ -105,7 +106,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,7 +122,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AutoGenerateSynonymsPhraseQuery", err) } s.AutoGenerateSynonymsPhraseQuery = &value case bool: @@ -135,7 +136,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -151,7 +152,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CutoffFrequency", err) } f := Float64(value) s.CutoffFrequency = &f @@ -166,24 +167,24 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "fuzzy_rewrite": if err := dec.Decode(&s.FuzzyRewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyRewrite", err) } case "fuzzy_transpositions": @@ -193,7 +194,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyTranspositions", err) } s.FuzzyTranspositions = &value case bool: @@ -207,7 +208,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } s.Lenient = &value case bool: @@ -222,7 +223,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxExpansions", err) } s.MaxExpansions = &value case float64: @@ -232,12 +233,12 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "operator": if err := dec.Decode(&s.Operator); err != nil { - return err + return fmt.Errorf("%s | %w", "Operator", err) } case "prefix_length": @@ -248,7 +249,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -259,7 +260,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -271,7 +272,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -288,7 +289,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Slop", err) } s.Slop = &value case float64: @@ -303,7 +304,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TieBreaker", err) } f := Float64(value) s.TieBreaker = &f @@ -314,12 +315,12 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "zero_terms_query": if err := dec.Decode(&s.ZeroTermsQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "ZeroTermsQuery", err) } } diff --git a/typedapi/types/multiplexertokenfilter.go b/typedapi/types/multiplexertokenfilter.go index 0401e253b1..3185fc88fb 100644 --- a/typedapi/types/multiplexertokenfilter.go +++ b/typedapi/types/multiplexertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiplexerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L260-L264 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L260-L264 type MultiplexerTokenFilter struct { Filters []string `json:"filters"` PreserveOriginal Stringifiedboolean `json:"preserve_original,omitempty"` @@ -54,22 +55,22 @@ func (s *MultiplexerTokenFilter) UnmarshalJSON(data []byte) error { case "filters": if err := dec.Decode(&s.Filters); err != nil { - return err + return fmt.Errorf("%s | %w", "Filters", err) } case "preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/multisearchbody.go b/typedapi/types/multisearchbody.go index 6a426c0207..3ae9f3e8b9 100644 --- a/typedapi/types/multisearchbody.go +++ b/typedapi/types/multisearchbody.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MultisearchBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/types.ts#L71-L202 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/types.ts#L71-L202 type MultisearchBody struct { Aggregations map[string]Aggregations `json:"aggregations,omitempty"` Collapse *FieldCollapse `json:"collapse,omitempty"` @@ -140,17 +141,17 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]Aggregations, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "collapse": if err := dec.Decode(&s.Collapse); err != nil { - return err + return fmt.Errorf("%s | %w", "Collapse", err) } case "docvalue_fields": if err := dec.Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "explain": @@ -160,7 +161,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -172,12 +173,12 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { s.Ext = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Ext); err != nil { - return err + return fmt.Errorf("%s | %w", "Ext", err) } case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "from": @@ -188,7 +189,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -198,12 +199,12 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "indices_boost": if err := dec.Decode(&s.IndicesBoost); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesBoost", err) } case "knn": @@ -212,13 +213,13 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewKnnQuery() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } s.Knn = append(s.Knn, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Knn); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } } @@ -229,7 +230,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinScore", err) } f := Float64(value) s.MinScore = &f @@ -240,12 +241,12 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "pit": if err := dec.Decode(&s.Pit); err != nil { - return err + return fmt.Errorf("%s | %w", "Pit", err) } case "post_filter": if err := 
dec.Decode(&s.PostFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "PostFilter", err) } case "profile": @@ -255,7 +256,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -264,7 +265,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "rescore": @@ -273,19 +274,19 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewRescore() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } s.Rescore = append(s.Rescore, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Rescore); err != nil { - return err + return fmt.Errorf("%s | %w", "Rescore", err) } } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "script_fields": @@ -293,12 +294,12 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "search_after": if err := dec.Decode(&s.SearchAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } case "seq_no_primary_term": @@ -308,7 +309,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoPrimaryTerm", err) } s.SeqNoPrimaryTerm = &value case bool: @@ -323,7 +324,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -337,24 +338,24 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "stored_fields": @@ -363,19 +364,19 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } case "suggest": if err := dec.Decode(&s.Suggest); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } case "terminate_after": @@ -385,7 +386,7 @@ func (s *MultisearchBody) 
UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminateAfter", err) } s.TerminateAfter = &value case float64: @@ -396,7 +397,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "timeout": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -412,7 +413,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrackScores", err) } s.TrackScores = &value case bool: @@ -421,7 +422,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "track_total_hits": if err := dec.Decode(&s.TrackTotalHits); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackTotalHits", err) } case "version": @@ -431,7 +432,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } s.Version = &value case bool: diff --git a/typedapi/types/multisearchheader.go b/typedapi/types/multisearchheader.go index e67ae61c50..750bfe28e2 100644 --- a/typedapi/types/multisearchheader.go +++ b/typedapi/types/multisearchheader.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // MultisearchHeader type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/types.ts#L53-L68 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/types.ts#L53-L68 type MultisearchHeader struct { AllowNoIndices *bool `json:"allow_no_indices,omitempty"` AllowPartialSearchResults *bool `json:"allow_partial_search_results,omitempty"` @@ -70,7 +71,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowNoIndices", err) } s.AllowNoIndices = &value case bool: @@ -84,7 +85,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowPartialSearchResults", err) } s.AllowPartialSearchResults = &value case bool: @@ -98,7 +99,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CcsMinimizeRoundtrips", err) } s.CcsMinimizeRoundtrips = &value case bool: @@ -111,13 +112,13 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := &expandwildcard.ExpandWildcard{} if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpandWildcards", err) } s.ExpandWildcards = append(s.ExpandWildcards, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.ExpandWildcards); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpandWildcards", err) } } @@ -128,7 +129,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreThrottled", err) } s.IgnoreThrottled = &value case bool: @@ -142,7 +143,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnavailable", err) } s.IgnoreUnavailable = &value case bool: @@ -155,20 +156,20 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = append(s.Index, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } case "preference": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Preference", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -184,7 +185,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCache", err) } s.RequestCache = &value case bool: @@ -193,12 +194,12 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "search_type": if err := dec.Decode(&s.SearchType); err != nil { - return err + return fmt.Errorf("%s | %w", 
"SearchType", err) } } diff --git a/typedapi/types/multisearchitem.go b/typedapi/types/multisearchitem.go index 94fe4a0bae..6aa9157295 100644 --- a/typedapi/types/multisearchitem.go +++ b/typedapi/types/multisearchitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -31,7 +32,7 @@ import ( // MultiSearchItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/types.ts#L214-L217 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/types.ts#L214-L217 type MultiSearchItem struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` @@ -90,490 +91,490 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := 
NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != 
nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -583,7 +584,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -592,7 +593,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -600,12 +601,12 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { s.Fields = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -615,7 +616,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := Float64(value) s.MaxScore = &f @@ -631,7 +632,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -641,22 +642,22 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "status": @@ -667,7 +668,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } s.Status = &value case float64: @@ -700,28 +701,28 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case "completion": o := NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -731,7 +732,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -745,7 +746,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -759,7 +760,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -773,7 +774,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/types/multisearchresult.go b/typedapi/types/multisearchresult.go index b7167f0233..12db3d3515 100644 --- a/typedapi/types/multisearchresult.go +++ b/typedapi/types/multisearchresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MultiSearchResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch/types.ts#L204-L207 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch/types.ts#L204-L207 type MultiSearchResult struct { Responses []MsearchResponseItem `json:"responses"` Took int64 `json:"took"` @@ -54,7 +55,7 @@ func (s *MultiSearchResult) UnmarshalJSON(data []byte) error { case "responses": messageArray := []json.RawMessage{} if err := dec.Decode(&messageArray); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } responses: for _, message := range messageArray { @@ -65,7 +66,7 @@ func (s *MultiSearchResult) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Responses", err) } switch t { @@ -74,7 +75,7 @@ func (s *MultiSearchResult) UnmarshalJSON(data []byte) error { o := NewMultiSearchItem() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } s.Responses = append(s.Responses, o) continue responses @@ -83,7 +84,7 @@ func (s *MultiSearchResult) UnmarshalJSON(data []byte) error { o := NewErrorResponseBase() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Responses", err) } s.Responses = append(s.Responses, o) continue responses @@ -99,7 +100,7 @@ func (s *MultiSearchResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/types/multitermlookup.go b/typedapi/types/multitermlookup.go index ab4cd971da..88a7d35205 100644 --- a/typedapi/types/multitermlookup.go +++ b/typedapi/types/multitermlookup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // MultiTermLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L624-L634 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L624-L634 type MultiTermLookup struct { // Field A fields from which to retrieve terms. Field string `json:"field"` @@ -55,12 +56,12 @@ func (s *MultiTermLookup) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } } diff --git a/typedapi/types/multitermsaggregate.go b/typedapi/types/multitermsaggregate.go index 43ccdf5609..2abd1ed8c1 100644 --- a/typedapi/types/multitermsaggregate.go +++ b/typedapi/types/multitermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MultiTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L461-L463 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L461-L463 type MultiTermsAggregate struct { Buckets BucketsMultiTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]MultiTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []MultiTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/multitermsaggregation.go b/typedapi/types/multitermsaggregation.go index 2fb4e88d52..31661cfa22 100644 
--- a/typedapi/types/multitermsaggregation.go +++ b/typedapi/types/multitermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // MultiTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L582-L622 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L582-L622 type MultiTermsAggregation struct { // CollectMode Specifies the strategy for data collection. CollectMode *termsaggregationcollectmode.TermsAggregationCollectMode `json:"collect_mode,omitempty"` @@ -76,12 +77,12 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case "collect_mode": if err := dec.Decode(&s.CollectMode); err != nil { - return err + return fmt.Errorf("%s | %w", "CollectMode", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -91,7 +92,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -102,7 +103,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,13 +122,13 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o case '[': o := make([]map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o } @@ -139,7 +140,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardMinDocCount", err) } s.ShardMinDocCount = &value case float64: @@ -155,7 +156,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -170,7 +171,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShowTermDocCountError", err) } s.ShowTermDocCountError = &value case bool: @@ -185,7 +186,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -195,7 +196,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case "terms": if err := 
dec.Decode(&s.Terms); err != nil { - return err + return fmt.Errorf("%s | %w", "Terms", err) } } diff --git a/typedapi/types/multitermsbucket.go b/typedapi/types/multitermsbucket.go index e28a389fcc..2be79d160e 100644 --- a/typedapi/types/multitermsbucket.go +++ b/typedapi/types/multitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // MultiTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L465-L469 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L465-L469 type MultiTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -63,7 +63,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -78,7 +78,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -88,13 +88,13 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,490 +117,490 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := 
NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := 
NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -610,7 +610,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/murmur3hashproperty.go b/typedapi/types/murmur3hashproperty.go index 7e876924b8..26a92b049a 100644 --- a/typedapi/types/murmur3hashproperty.go +++ b/typedapi/types/murmur3hashproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Murmur3HashProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L75-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L75-L77 type Murmur3HashProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -68,13 +69,13 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -85,7 +86,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -94,7 +95,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -412,7 +413,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -425,7 +426,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -738,7 +739,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -754,7 +755,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -763,7 +764,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/mutualinformationheuristic.go b/typedapi/types/mutualinformationheuristic.go index ed066b849d..4c1c83a94a 100644 --- a/typedapi/types/mutualinformationheuristic.go +++ b/typedapi/types/mutualinformationheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // MutualInformationHeuristic type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L753-L762 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L753-L762 type MutualInformationHeuristic struct { // BackgroundIsSuperset Set to `false` if you defined a custom background filter that represents a // different set of documents that you want to compare to. @@ -62,7 +63,7 @@ func (s *MutualInformationHeuristic) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundIsSuperset", err) } s.BackgroundIsSuperset = &value case bool: @@ -76,7 +77,7 @@ func (s *MutualInformationHeuristic) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeNegatives", err) } s.IncludeNegatives = &value case bool: diff --git a/typedapi/types/names.go b/typedapi/types/names.go index 20886eae11..e0ea813366 100644 --- a/typedapi/types/names.go +++ b/typedapi/types/names.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Names type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L81-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L81-L81 type Names []string diff --git a/typedapi/types/nativecode.go b/typedapi/types/nativecode.go index ea40349907..0132834a33 100644 --- a/typedapi/types/nativecode.go +++ b/typedapi/types/nativecode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NativeCode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/info/types.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/info/types.ts#L29-L32 type NativeCode struct { BuildHash string `json:"build_hash"` Version string `json:"version"` @@ -54,7 +55,7 @@ func (s *NativeCode) UnmarshalJSON(data []byte) error { case "build_hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildHash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *NativeCode) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/nativecodeinformation.go b/typedapi/types/nativecodeinformation.go index bb1f071118..313d0b94e3 100644 --- a/typedapi/types/nativecodeinformation.go +++ b/typedapi/types/nativecodeinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NativeCodeInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/info/types.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/info/types.ts#L29-L32 type NativeCodeInformation struct { BuildHash string `json:"build_hash"` Version string `json:"version"` @@ -54,7 +55,7 @@ func (s *NativeCodeInformation) UnmarshalJSON(data []byte) error { case "build_hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildHash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *NativeCodeInformation) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/nerinferenceoptions.go b/typedapi/types/nerinferenceoptions.go index 1bbf32b931..3c7605efbe 100644 --- a/typedapi/types/nerinferenceoptions.go +++ b/typedapi/types/nerinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NerInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L255-L264 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L255-L264 type NerInferenceOptions struct { // ClassificationLabels The token classification labels. 
Must be IOB formatted tags ClassificationLabels []string `json:"classification_labels,omitempty"` @@ -59,13 +60,13 @@ func (s *NerInferenceOptions) UnmarshalJSON(data []byte) error { case "classification_labels": if err := dec.Decode(&s.ClassificationLabels); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassificationLabels", err) } case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,12 +77,12 @@ func (s *NerInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } case "vocabulary": if err := dec.Decode(&s.Vocabulary); err != nil { - return err + return fmt.Errorf("%s | %w", "Vocabulary", err) } } diff --git a/typedapi/types/nerinferenceupdateoptions.go b/typedapi/types/nerinferenceupdateoptions.go index d1ced40528..34ae0195c3 100644 --- a/typedapi/types/nerinferenceupdateoptions.go +++ b/typedapi/types/nerinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NerInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L404-L409 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L404-L409 type NerInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. @@ -57,7 +58,7 @@ func (s *NerInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *NerInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/nestedaggregate.go b/typedapi/types/nestedaggregate.go index 85b1da7b7a..e410da98cc 100644 --- a/typedapi/types/nestedaggregate.go +++ b/typedapi/types/nestedaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // NestedAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L486-L487 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L486-L487 type NestedAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | 
%w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/nestedaggregation.go b/typedapi/types/nestedaggregation.go index 0dc22cb322..98b06a589e 100644 --- a/typedapi/types/nestedaggregation.go +++ b/typedapi/types/nestedaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NestedAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L636-L641 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L636-L641 type NestedAggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -55,13 +56,13 @@ func (s *NestedAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *NestedAggregation) UnmarshalJSON(data []byte) error { case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } } diff --git a/typedapi/types/nestedidentity.go b/typedapi/types/nestedidentity.go index 16917b8e01..2650723d37 100644 --- a/typedapi/types/nestedidentity.go +++ b/typedapi/types/nestedidentity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NestedIdentity type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L88-L92 type NestedIdentity struct { Field string `json:"field"` Nested_ *NestedIdentity `json:"_nested,omitempty"` @@ -54,12 +55,12 @@ func (s *NestedIdentity) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "_nested": if err := dec.Decode(&s.Nested_); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested_", err) } case "offset": @@ -70,7 +71,7 @@ func (s *NestedIdentity) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } s.Offset = value case float64: diff --git a/typedapi/types/nestedproperty.go b/typedapi/types/nestedproperty.go index a2ba71c927..1d68b5b14d 100644 --- a/typedapi/types/nestedproperty.go +++ b/typedapi/types/nestedproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NestedProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/complex.ts#L39-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/complex.ts#L39-L44 type NestedProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -70,19 +71,19 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "enabled": @@ -92,7 +93,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -414,7 +415,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -429,7 +430,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeInParent", err) } s.IncludeInParent = &value case bool: @@ -443,7 +444,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeInRoot", err) } s.IncludeInRoot = &value case bool: @@ -455,7 +456,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -768,7 +769,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -784,7 +785,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -793,7 +794,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/nestedquery.go b/typedapi/types/nestedquery.go index 65cde09c92..98a474e8f2 100644 --- a/typedapi/types/nestedquery.go +++ b/typedapi/types/nestedquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NestedQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/joining.ts#L106-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/joining.ts#L106-L130 type NestedQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -77,7 +78,7 @@ func (s *NestedQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -93,7 +94,7 @@ func (s *NestedQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -102,23 +103,23 @@ func (s *NestedQuery) UnmarshalJSON(data []byte) error { case "inner_hits": if err := dec.Decode(&s.InnerHits); err != nil { - return err + return fmt.Errorf("%s | %w", "InnerHits", err) } case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,7 +130,7 @@ func (s *NestedQuery) UnmarshalJSON(data []byte) error { case "score_mode": if err := dec.Decode(&s.ScoreMode); err != nil { - return err + return fmt.Errorf("%s | %w", "ScoreMode", err) } } diff --git a/typedapi/types/nestedsortvalue.go b/typedapi/types/nestedsortvalue.go index 134b768701..93bdc888e9 100644 --- a/typedapi/types/nestedsortvalue.go +++ b/typedapi/types/nestedsortvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NestedSortValue type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L30-L35 type NestedSortValue struct { Filter *Query `json:"filter,omitempty"` MaxChildren *int `json:"max_children,omitempty"` @@ -55,7 +56,7 @@ func (s *NestedSortValue) UnmarshalJSON(data []byte) error { case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "max_children": @@ -66,7 +67,7 @@ func (s *NestedSortValue) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxChildren", err) } s.MaxChildren = &value case float64: @@ -76,12 +77,12 @@ func (s *NestedSortValue) UnmarshalJSON(data []byte) error { case "nested": if err := dec.Decode(&s.Nested); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested", err) } case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } } diff --git a/typedapi/types/nevercondition.go b/typedapi/types/nevercondition.go index 2b0b580231..2c6d6deb42 100644 --- a/typedapi/types/nevercondition.go +++ b/typedapi/types/nevercondition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NeverCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L69-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L69-L69 type NeverCondition struct { } diff --git a/typedapi/types/ngramtokenfilter.go b/typedapi/types/ngramtokenfilter.go index 1bea14fece..878f4f0a8e 100644 --- a/typedapi/types/ngramtokenfilter.go +++ b/typedapi/types/ngramtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NGramTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L266-L271 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L266-L271 type NGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` @@ -62,7 +63,7 @@ func (s *NGramTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxGram", err) } s.MaxGram = &value case float64: @@ -78,7 +79,7 @@ func (s *NGramTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinGram", err) } s.MinGram = &value case float64: @@ -88,17 +89,17 @@ func (s *NGramTokenFilter) UnmarshalJSON(data []byte) error { case "preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/ngramtokenizer.go b/typedapi/types/ngramtokenizer.go index fedef4d1f2..01b2a7fc8a 100644 --- a/typedapi/types/ngramtokenizer.go +++ b/typedapi/types/ngramtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NGramTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L39-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L39-L45 type NGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` @@ -60,7 +61,7 @@ func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { case "custom_token_chars": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CustomTokenChars", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxGram", err) } s.MaxGram = value case float64: @@ -93,7 +94,7 @@ func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinGram", err) } s.MinGram = value case float64: @@ -103,17 +104,17 @@ func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { case "token_chars": if err := dec.Decode(&s.TokenChars); err != nil { - return err + return fmt.Errorf("%s | %w", "TokenChars", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/nlpberttokenizationconfig.go b/typedapi/types/nlpberttokenizationconfig.go index 429d5204ad..4374ddf016 100644 --- a/typedapi/types/nlpberttokenizationconfig.go +++ b/typedapi/types/nlpberttokenizationconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NlpBertTokenizationConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L131-L158 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L131-L158 type NlpBertTokenizationConfig struct { // DoLowerCase Should the tokenizer lower case the text DoLowerCase *bool `json:"do_lower_case,omitempty"` @@ -70,7 +71,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DoLowerCase", err) } s.DoLowerCase = &value case bool: @@ -85,7 +86,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSequenceLength", err) } s.MaxSequenceLength = &value case float64: @@ -101,7 +102,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Span", err) } s.Span = &value case float64: @@ -111,7 +112,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case "truncate": if err := dec.Decode(&s.Truncate); err != nil { - return err + return fmt.Errorf("%s | %w", "Truncate", err) } case "with_special_tokens": @@ -121,7 +122,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WithSpecialTokens", err) } s.WithSpecialTokens = &value case bool: diff --git a/typedapi/types/nlprobertatokenizationconfig.go b/typedapi/types/nlprobertatokenizationconfig.go index 1c51630a0c..c1948abb8c 100644 --- a/typedapi/types/nlprobertatokenizationconfig.go +++ b/typedapi/types/nlprobertatokenizationconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NlpRobertaTokenizationConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L160-L187 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L160-L187 type NlpRobertaTokenizationConfig struct { // AddPrefixSpace Should the tokenizer prefix input with a space character AddPrefixSpace *bool `json:"add_prefix_space,omitempty"` @@ -70,7 +71,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AddPrefixSpace", err) } s.AddPrefixSpace = &value case bool: @@ -85,7 +86,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSequenceLength", err) } s.MaxSequenceLength = &value case float64: @@ -101,7 +102,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Span", err) } s.Span = &value case float64: @@ -111,7 +112,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case "truncate": if err := dec.Decode(&s.Truncate); err != nil { - return err + return fmt.Errorf("%s | %w", "Truncate", err) } case "with_special_tokens": @@ -121,7 +122,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WithSpecialTokens", err) } s.WithSpecialTokens = &value case bool: diff --git a/typedapi/types/nlptokenizationupdateoptions.go b/typedapi/types/nlptokenizationupdateoptions.go index 8a10e5acb2..647e8e4f15 100644 --- a/typedapi/types/nlptokenizationupdateoptions.go +++ b/typedapi/types/nlptokenizationupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NlpTokenizationUpdateOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L356-L361 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L356-L361 type NlpTokenizationUpdateOptions struct { // Span Span options to apply Span *int `json:"span,omitempty"` @@ -63,7 +64,7 @@ func (s *NlpTokenizationUpdateOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Span", err) } s.Span = &value case float64: @@ -73,7 +74,7 @@ func (s *NlpTokenizationUpdateOptions) UnmarshalJSON(data []byte) error { case "truncate": if err := dec.Decode(&s.Truncate); err != nil { - return err + return fmt.Errorf("%s | %w", "Truncate", err) } } diff --git a/typedapi/types/node.go b/typedapi/types/node.go index b7a8088d0c..acaf354bfb 100644 --- a/typedapi/types/node.go +++ b/typedapi/types/node.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Node type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/cache_stats/Response.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/cache_stats/Response.ts#L30-L32 type Node struct { SharedCache Shared `json:"shared_cache"` } diff --git a/typedapi/types/nodeallocationexplanation.go b/typedapi/types/nodeallocationexplanation.go index 7a2f4e27c8..996e5f863f 100644 --- a/typedapi/types/nodeallocationexplanation.go +++ b/typedapi/types/nodeallocationexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NodeAllocationExplanation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L97-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L97-L106 type NodeAllocationExplanation struct { Deciders []AllocationDecision `json:"deciders"` NodeAttributes map[string]string `json:"node_attributes"` @@ -61,7 +62,7 @@ func (s *NodeAllocationExplanation) UnmarshalJSON(data []byte) error { case "deciders": if err := dec.Decode(&s.Deciders); err != nil { - return err + return fmt.Errorf("%s | %w", "Deciders", err) } case "node_attributes": @@ -69,32 +70,32 @@ func (s *NodeAllocationExplanation) UnmarshalJSON(data []byte) error { s.NodeAttributes = make(map[string]string, 0) } if err := dec.Decode(&s.NodeAttributes); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeAttributes", err) } case "node_decision": if err := dec.Decode(&s.NodeDecision); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeDecision", err) } case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "node_name": if err := dec.Decode(&s.NodeName); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } case "store": if err := dec.Decode(&s.Store); err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } case "weight_ranking": @@ -105,7 +106,7 @@ func (s *NodeAllocationExplanation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WeightRanking", err) } s.WeightRanking = value case float64: diff --git a/typedapi/types/nodeattributes.go b/typedapi/types/nodeattributes.go index 91c773ea17..7b16473805 100644 --- a/typedapi/types/nodeattributes.go +++ b/typedapi/types/nodeattributes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NodeAttributes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Node.ts#L41-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Node.ts#L41-L58 type NodeAttributes struct { // Attributes Lists node attributes. 
Attributes map[string]string `json:"attributes"` @@ -68,18 +69,18 @@ func (s *NodeAttributes) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "ephemeral_id": if err := dec.Decode(&s.EphemeralId); err != nil { - return err + return fmt.Errorf("%s | %w", "EphemeralId", err) } case "external_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ExternalId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,22 +91,22 @@ func (s *NodeAttributes) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/nodeattributesrecord.go b/typedapi/types/nodeattributesrecord.go index e6c8245a7f..5641e69f92 100644 --- a/typedapi/types/nodeattributesrecord.go +++ b/typedapi/types/nodeattributesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeAttributesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/nodeattrs/types.ts#L20-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/nodeattrs/types.ts#L20-L55 type NodeAttributesRecord struct { // Attr The attribute name. 
Attr *string `json:"attr,omitempty"` @@ -68,7 +69,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "attr": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Attr", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,7 +81,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "host", "h": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "ip", "i": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,7 +117,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "pid": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -140,7 +141,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "port": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -152,7 +153,7 @@ func (s *NodeAttributesRecord) UnmarshalJSON(data []byte) error { case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodebufferpool.go b/typedapi/types/nodebufferpool.go index 57d5171e52..1b17c7912f 100644 --- a/typedapi/types/nodebufferpool.go +++ b/typedapi/types/nodebufferpool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeBufferPool type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L788-L809 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L788-L809 type NodeBufferPool struct { // Count Number of buffer pools. 
Count *int64 `json:"count,omitempty"` @@ -66,7 +67,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -77,7 +78,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { case "total_capacity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCapacity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,7 +94,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCapacityInBytes", err) } s.TotalCapacityInBytes = &value case float64: @@ -104,7 +105,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { case "used": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Used", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,7 +121,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedInBytes", err) } s.UsedInBytes = &value case float64: diff --git a/typedapi/types/nodediskusage.go b/typedapi/types/nodediskusage.go index cd1e22f82e..0d60c5e790 100644 --- a/typedapi/types/nodediskusage.go +++ b/typedapi/types/nodediskusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeDiskUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L56-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L56-L60 type NodeDiskUsage struct { LeastAvailable DiskUsage `json:"least_available"` MostAvailable DiskUsage `json:"most_available"` @@ -53,17 +54,17 @@ func (s *NodeDiskUsage) UnmarshalJSON(data []byte) error { case "least_available": if err := dec.Decode(&s.LeastAvailable); err != nil { - return err + return fmt.Errorf("%s | %w", "LeastAvailable", err) } case "most_available": if err := dec.Decode(&s.MostAvailable); err != nil { - return err + return fmt.Errorf("%s | %w", "MostAvailable", err) } case "node_name": if err := dec.Decode(&s.NodeName); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } } diff --git a/typedapi/types/nodeids.go b/typedapi/types/nodeids.go index 542cd5100d..7158ab4435 100644 --- a/typedapi/types/nodeids.go +++ b/typedapi/types/nodeids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeIds type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L64-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L64-L64 type NodeIds []string diff --git a/typedapi/types/nodeinfo.go b/typedapi/types/nodeinfo.go index cb1f4a2cae..20d6fbc083 100644 --- a/typedapi/types/nodeinfo.go +++ b/typedapi/types/nodeinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NodeInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L31-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L31-L67 type NodeInfo struct { Aggregations map[string]NodeInfoAggregation `json:"aggregations,omitempty"` Attributes map[string]string `json:"attributes"` @@ -90,7 +91,7 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]NodeInfoAggregation, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "attributes": @@ -98,13 +99,13 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "build_flavor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildFlavor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,7 +117,7 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { case "build_hash": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildHash", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { case "build_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BuildType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -139,67 +140,67 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "http": if err := dec.Decode(&s.Http); err != nil { - return err + return fmt.Errorf("%s | %w", "Http", err) } case "ingest": if err := dec.Decode(&s.Ingest); err != nil { - return err + return fmt.Errorf("%s | %w", "Ingest", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "jvm": if err := dec.Decode(&s.Jvm); err != nil { - return err + return fmt.Errorf("%s | %w", "Jvm", err) } case "modules": if err := dec.Decode(&s.Modules); err != nil { - return err + return fmt.Errorf("%s | %w", "Modules", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "network": if err := 
dec.Decode(&s.Network); err != nil { - return err + return fmt.Errorf("%s | %w", "Network", err) } case "os": if err := dec.Decode(&s.Os); err != nil { - return err + return fmt.Errorf("%s | %w", "Os", err) } case "plugins": if err := dec.Decode(&s.Plugins); err != nil { - return err + return fmt.Errorf("%s | %w", "Plugins", err) } case "process": if err := dec.Decode(&s.Process); err != nil { - return err + return fmt.Errorf("%s | %w", "Process", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "thread_pool": @@ -207,7 +208,7 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { s.ThreadPool = make(map[string]NodeThreadPoolInfo, 0) } if err := dec.Decode(&s.ThreadPool); err != nil { - return err + return fmt.Errorf("%s | %w", "ThreadPool", err) } case "total_indexing_buffer": @@ -217,7 +218,7 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalIndexingBuffer", err) } s.TotalIndexingBuffer = &value case float64: @@ -227,22 +228,22 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { case "total_indexing_buffer_in_bytes": if err := dec.Decode(&s.TotalIndexingBufferInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalIndexingBufferInBytes", err) } case "transport": if err := dec.Decode(&s.Transport); err != nil { - return err + return fmt.Errorf("%s | %w", "Transport", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/nodeinfoaction.go b/typedapi/types/nodeinfoaction.go index 95721711ca..0d5f2b4679 100644 --- a/typedapi/types/nodeinfoaction.go +++ b/typedapi/types/nodeinfoaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L181-L183 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L181-L183 type NodeInfoAction struct { DestructiveRequiresName string `json:"destructive_requires_name"` } @@ -53,7 +54,7 @@ func (s *NodeInfoAction) UnmarshalJSON(data []byte) error { case "destructive_requires_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DestructiveRequiresName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoaggregation.go b/typedapi/types/nodeinfoaggregation.go index 60a173b9e9..b4ab1b392c 100644 --- a/typedapi/types/nodeinfoaggregation.go +++ b/typedapi/types/nodeinfoaggregation.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L232-L234 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L232-L234 type NodeInfoAggregation struct { Types []string `json:"types"` } diff --git a/typedapi/types/nodeinfobootstrap.go b/typedapi/types/nodeinfobootstrap.go index 8e33ac861b..e838484efd 100644 --- a/typedapi/types/nodeinfobootstrap.go +++ b/typedapi/types/nodeinfobootstrap.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoBootstrap type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L201-L203 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L201-L203 type NodeInfoBootstrap struct { MemoryLock string `json:"memory_lock"` } @@ -53,7 +54,7 @@ func (s *NodeInfoBootstrap) UnmarshalJSON(data []byte) error { case "memory_lock": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryLock", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoclient.go b/typedapi/types/nodeinfoclient.go index 632ee0d510..ef9c995ce4 100644 --- a/typedapi/types/nodeinfoclient.go +++ b/typedapi/types/nodeinfoclient.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoClient type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L185-L187 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L185-L187 type NodeInfoClient struct { Type string `json:"type"` } @@ -53,7 +54,7 @@ func (s *NodeInfoClient) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfodiscover.go b/typedapi/types/nodeinfodiscover.go index 99e99a4aa7..d159ac8b2c 100644 --- a/typedapi/types/nodeinfodiscover.go +++ b/typedapi/types/nodeinfodiscover.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // NodeInfoDiscover type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L173-L179 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L173-L179 type NodeInfoDiscover struct { NodeInfoDiscover map[string]json.RawMessage `json:"-"` SeedHosts []string `json:"seed_hosts,omitempty"` @@ -56,18 +56,18 @@ func (s *NodeInfoDiscover) UnmarshalJSON(data []byte) error { case "seed_hosts": if err := dec.Decode(&s.SeedHosts); err != nil { - return err + return fmt.Errorf("%s | %w", "SeedHosts", err) } case "seed_providers": if err := dec.Decode(&s.SeedProviders); err != nil { - return err + return fmt.Errorf("%s | %w", "SeedProviders", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +84,7 @@ func (s *NodeInfoDiscover) UnmarshalJSON(data []byte) error { } raw := new(json.RawMessage) if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeInfoDiscover", err) } s.NodeInfoDiscover[key] = *raw } diff --git a/typedapi/types/nodeinfohttp.go b/typedapi/types/nodeinfohttp.go index 61f679cf82..198c4f15aa 100644 --- a/typedapi/types/nodeinfohttp.go +++ b/typedapi/types/nodeinfohttp.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoHttp type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L303-L308 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L303-L308 type NodeInfoHttp struct { BoundAddress []string `json:"bound_address"` MaxContentLength ByteSize `json:"max_content_length,omitempty"` @@ -55,12 +56,12 @@ func (s *NodeInfoHttp) UnmarshalJSON(data []byte) error { case "bound_address": if err := dec.Decode(&s.BoundAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundAddress", err) } case "max_content_length": if err := dec.Decode(&s.MaxContentLength); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxContentLength", err) } case "max_content_length_in_bytes": @@ -70,7 +71,7 @@ func (s *NodeInfoHttp) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxContentLengthInBytes", err) } s.MaxContentLengthInBytes = value case float64: @@ -81,7 +82,7 @@ func (s *NodeInfoHttp) UnmarshalJSON(data []byte) error { case "publish_address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PublishAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoingest.go b/typedapi/types/nodeinfoingest.go index 3262cff0b8..a1a71c51d2 100644 --- a/typedapi/types/nodeinfoingest.go +++ b/typedapi/types/nodeinfoingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L224-L226 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L224-L226 type NodeInfoIngest struct { Processors []NodeInfoIngestProcessor `json:"processors"` } diff --git a/typedapi/types/nodeinfoingestdownloader.go b/typedapi/types/nodeinfoingestdownloader.go index a7b7e122d9..bbb46ac3d2 100644 --- a/typedapi/types/nodeinfoingestdownloader.go +++ b/typedapi/types/nodeinfoingestdownloader.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoIngestDownloader type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L128-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L128-L130 type NodeInfoIngestDownloader struct { Enabled string `json:"enabled"` } @@ -53,7 +54,7 @@ func (s *NodeInfoIngestDownloader) UnmarshalJSON(data []byte) error { case "enabled": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoingestinfo.go b/typedapi/types/nodeinfoingestinfo.go index 9c6276fd49..cec6843629 100644 --- a/typedapi/types/nodeinfoingestinfo.go +++ b/typedapi/types/nodeinfoingestinfo.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoIngestInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L124-L126 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L124-L126 type NodeInfoIngestInfo struct { Downloader NodeInfoIngestDownloader `json:"downloader"` } diff --git a/typedapi/types/nodeinfoingestprocessor.go b/typedapi/types/nodeinfoingestprocessor.go index 85540a0a74..f1b2e64a3c 100644 --- a/typedapi/types/nodeinfoingestprocessor.go +++ b/typedapi/types/nodeinfoingestprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoIngestProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L228-L230 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L228-L230 type NodeInfoIngestProcessor struct { Type string `json:"type"` } @@ -53,7 +54,7 @@ func (s *NodeInfoIngestProcessor) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfojvmmemory.go b/typedapi/types/nodeinfojvmmemory.go index a2f7bf8f0d..b3d6c96ec4 100644 --- a/typedapi/types/nodeinfojvmmemory.go +++ b/typedapi/types/nodeinfojvmmemory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoJvmMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L310-L321 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L310-L321 type NodeInfoJvmMemory struct { DirectMax ByteSize `json:"direct_max,omitempty"` DirectMaxInBytes int64 `json:"direct_max_in_bytes"` @@ -61,7 +62,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case "direct_max": if err := dec.Decode(&s.DirectMax); err != nil { - return err + return fmt.Errorf("%s | %w", "DirectMax", err) } case "direct_max_in_bytes": @@ -71,7 +72,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DirectMaxInBytes", err) } s.DirectMaxInBytes = value case float64: @@ -81,7 +82,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case "heap_init": if err := dec.Decode(&s.HeapInit); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapInit", err) } case "heap_init_in_bytes": @@ -91,7 +92,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapInitInBytes", err) } s.HeapInitInBytes = value case float64: @@ -101,7 +102,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case "heap_max": if err := dec.Decode(&s.HeapMax); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMax", err) } case "heap_max_in_bytes": @@ -111,7 +112,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMaxInBytes", err) } s.HeapMaxInBytes = value case float64: @@ -121,7 +122,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case "non_heap_init": if err := dec.Decode(&s.NonHeapInit); err != nil { - return err + return fmt.Errorf("%s | %w", "NonHeapInit", err) } case "non_heap_init_in_bytes": @@ -131,7 +132,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NonHeapInitInBytes", err) } s.NonHeapInitInBytes = value case float64: @@ -141,7 +142,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case "non_heap_max": if err := dec.Decode(&s.NonHeapMax); err != nil { - return err + return fmt.Errorf("%s | %w", "NonHeapMax", err) } case "non_heap_max_in_bytes": @@ -151,7 +152,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NonHeapMaxInBytes", err) } s.NonHeapMaxInBytes = value case float64: diff --git a/typedapi/types/nodeinfomemory.go b/typedapi/types/nodeinfomemory.go index 0806029b6b..0838669f5f 100644 --- a/typedapi/types/nodeinfomemory.go +++ b/typedapi/types/nodeinfomemory.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L323-L326 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L323-L326 type NodeInfoMemory struct { Total string `json:"total"` TotalInBytes int64 `json:"total_in_bytes"` @@ -54,7 +55,7 @@ func (s *NodeInfoMemory) UnmarshalJSON(data []byte) error { case "total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *NodeInfoMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = value case float64: diff --git a/typedapi/types/nodeinfonetwork.go b/typedapi/types/nodeinfonetwork.go index 9d534571c1..e5b39b7ba1 100644 --- a/typedapi/types/nodeinfonetwork.go +++ b/typedapi/types/nodeinfonetwork.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoNetwork type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L328-L331 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L328-L331 type NodeInfoNetwork struct { PrimaryInterface NodeInfoNetworkInterface `json:"primary_interface"` RefreshInterval int `json:"refresh_interval"` @@ -53,7 +54,7 @@ func (s *NodeInfoNetwork) UnmarshalJSON(data []byte) error { case "primary_interface": if err := dec.Decode(&s.PrimaryInterface); err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryInterface", err) } case "refresh_interval": @@ -64,7 +65,7 @@ func (s *NodeInfoNetwork) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshInterval", err) } s.RefreshInterval = value case float64: diff --git a/typedapi/types/nodeinfonetworkinterface.go b/typedapi/types/nodeinfonetworkinterface.go index 7d663b3e0f..9f8ef67732 100644 --- a/typedapi/types/nodeinfonetworkinterface.go +++ b/typedapi/types/nodeinfonetworkinterface.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoNetworkInterface type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L333-L337 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L333-L337 type NodeInfoNetworkInterface struct { Address string `json:"address"` MacAddress string `json:"mac_address"` @@ -55,7 +56,7 @@ func (s *NodeInfoNetworkInterface) UnmarshalJSON(data []byte) error { case "address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Address", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *NodeInfoNetworkInterface) UnmarshalJSON(data []byte) error { case "mac_address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MacAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *NodeInfoNetworkInterface) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/nodeinfooscpu.go b/typedapi/types/nodeinfooscpu.go index c46f0c5629..9fbff960e7 100644 --- a/typedapi/types/nodeinfooscpu.go +++ b/typedapi/types/nodeinfooscpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoOSCPU type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L339-L348 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L339-L348 type NodeInfoOSCPU struct { CacheSize string `json:"cache_size"` CacheSizeInBytes int `json:"cache_size_in_bytes"` @@ -60,7 +61,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "cache_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CacheSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheSizeInBytes", err) } s.CacheSizeInBytes = value case float64: @@ -93,7 +94,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CoresPerSocket", err) } s.CoresPerSocket = value case float64: @@ -109,7 +110,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Mhz", err) } s.Mhz = value case float64: @@ -120,7 +121,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "model": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Model", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -137,7 +138,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCores", err) } s.TotalCores = value case float64: @@ -153,7 +154,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSockets", err) } s.TotalSockets = value case float64: @@ -164,7 +165,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "vendor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Vendor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfopath.go b/typedapi/types/nodeinfopath.go index f8ff467899..5f928c9dc9 100644 --- a/typedapi/types/nodeinfopath.go +++ b/typedapi/types/nodeinfopath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoPath type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L158-L163 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L158-L163 type NodeInfoPath struct { Data []string `json:"data,omitempty"` Home *string `json:"home,omitempty"` @@ -55,13 +56,13 @@ func (s *NodeInfoPath) UnmarshalJSON(data []byte) error { case "data": if err := dec.Decode(&s.Data); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } case "home": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Home", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *NodeInfoPath) UnmarshalJSON(data []byte) error { case "logs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Logs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *NodeInfoPath) UnmarshalJSON(data []byte) error { case "repo": if err := dec.Decode(&s.Repo); err != nil { - return err + return fmt.Errorf("%s | %w", "Repo", err) } } diff --git a/typedapi/types/nodeinforepositories.go b/typedapi/types/nodeinforepositories.go index 76f54153ea..16692fa192 100644 --- a/typedapi/types/nodeinforepositories.go +++ b/typedapi/types/nodeinforepositories.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoRepositories type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L165-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L165-L167 type NodeInfoRepositories struct { Url NodeInfoRepositoriesUrl `json:"url"` } diff --git a/typedapi/types/nodeinforepositoriesurl.go b/typedapi/types/nodeinforepositoriesurl.go index 9416226b7e..2062b2791f 100644 --- a/typedapi/types/nodeinforepositoriesurl.go +++ b/typedapi/types/nodeinforepositoriesurl.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoRepositoriesUrl type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L169-L171 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L169-L171 type NodeInfoRepositoriesUrl struct { AllowedUrls string `json:"allowed_urls"` } @@ -53,7 +54,7 @@ func (s *NodeInfoRepositoriesUrl) UnmarshalJSON(data []byte) error { case "allowed_urls": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AllowedUrls", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoscript.go b/typedapi/types/nodeinfoscript.go index 8405a11913..cf81e2b2a3 100644 --- a/typedapi/types/nodeinfoscript.go +++ b/typedapi/types/nodeinfoscript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L281-L284 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L281-L284 type NodeInfoScript struct { AllowedTypes string `json:"allowed_types"` DisableMaxCompilationsRate string `json:"disable_max_compilations_rate"` @@ -54,7 +55,7 @@ func (s *NodeInfoScript) UnmarshalJSON(data []byte) error { case "allowed_types": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AllowedTypes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *NodeInfoScript) UnmarshalJSON(data []byte) error { case "disable_max_compilations_rate": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DisableMaxCompilationsRate", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfosearch.go b/typedapi/types/nodeinfosearch.go index a876f77bd4..72596e3e19 100644 --- a/typedapi/types/nodeinfosearch.go +++ b/typedapi/types/nodeinfosearch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L286-L288 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L286-L288 type NodeInfoSearch struct { Remote NodeInfoSearchRemote `json:"remote"` } diff --git a/typedapi/types/nodeinfosearchremote.go b/typedapi/types/nodeinfosearchremote.go index b769341c52..dc2de120c2 100644 --- a/typedapi/types/nodeinfosearchremote.go +++ b/typedapi/types/nodeinfosearchremote.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSearchRemote type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L290-L292 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L290-L292 type NodeInfoSearchRemote struct { Connect string `json:"connect"` } @@ -53,7 +54,7 @@ func (s *NodeInfoSearchRemote) UnmarshalJSON(data []byte) error { case "connect": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Connect", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfosettings.go b/typedapi/types/nodeinfosettings.go index 91886188aa..b81d113b53 100644 --- a/typedapi/types/nodeinfosettings.go +++ b/typedapi/types/nodeinfosettings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L69-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L69-L85 type NodeInfoSettings struct { Action *NodeInfoAction `json:"action,omitempty"` Bootstrap *NodeInfoBootstrap `json:"bootstrap,omitempty"` diff --git a/typedapi/types/nodeinfosettingscluster.go b/typedapi/types/nodeinfosettingscluster.go index dfa64b508a..d8c4b15060 100644 --- a/typedapi/types/nodeinfosettingscluster.go +++ b/typedapi/types/nodeinfosettingscluster.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeInfoSettingsCluster type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L132-L142 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L132-L142 type NodeInfoSettingsCluster struct { DeprecationIndexing *DeprecationIndexing `json:"deprecation_indexing,omitempty"` Election NodeInfoSettingsClusterElection `json:"election"` @@ -55,27 +56,27 @@ func (s *NodeInfoSettingsCluster) UnmarshalJSON(data []byte) error { case "deprecation_indexing": if err := dec.Decode(&s.DeprecationIndexing); err != nil { - return err + return fmt.Errorf("%s | %w", "DeprecationIndexing", err) } case "election": if err := dec.Decode(&s.Election); err != nil { - return err + return fmt.Errorf("%s | %w", "Election", err) } case "initial_master_nodes": if err := dec.Decode(&s.InitialMasterNodes); err != nil { - return err + return fmt.Errorf("%s | %w", "InitialMasterNodes", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } } diff --git a/typedapi/types/nodeinfosettingsclusterelection.go b/typedapi/types/nodeinfosettingsclusterelection.go index f2db71996d..d522d0830b 100644 --- a/typedapi/types/nodeinfosettingsclusterelection.go +++ b/typedapi/types/nodeinfosettingsclusterelection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeInfoSettingsClusterElection type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L148-L150 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L148-L150 type NodeInfoSettingsClusterElection struct { Strategy string `json:"strategy"` } @@ -51,7 +52,7 @@ func (s *NodeInfoSettingsClusterElection) UnmarshalJSON(data []byte) error { case "strategy": if err := dec.Decode(&s.Strategy); err != nil { - return err + return fmt.Errorf("%s | %w", "Strategy", err) } } diff --git a/typedapi/types/nodeinfosettingshttp.go b/typedapi/types/nodeinfosettingshttp.go index 18f0fbdf0a..290d7c27b3 100644 --- a/typedapi/types/nodeinfosettingshttp.go +++ b/typedapi/types/nodeinfosettingshttp.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSettingsHttp type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L189-L194 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L189-L194 type NodeInfoSettingsHttp struct { Compression string `json:"compression,omitempty"` Port string `json:"port,omitempty"` @@ -56,7 +57,7 @@ func (s *NodeInfoSettingsHttp) UnmarshalJSON(data []byte) error { case "compression": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Compression", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *NodeInfoSettingsHttp) UnmarshalJSON(data []byte) error { case "port": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,13 +80,13 @@ func (s *NodeInfoSettingsHttp) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "type.default": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TypeDefault", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfosettingshttptype.go b/typedapi/types/nodeinfosettingshttptype.go index fa1df68a9e..1dc0f308c4 100644 --- a/typedapi/types/nodeinfosettingshttptype.go +++ b/typedapi/types/nodeinfosettingshttptype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSettingsHttpType type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L196-L199 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L196-L199 type NodeInfoSettingsHttpType struct { Default string `json:"default"` } @@ -62,7 +63,7 @@ func (s *NodeInfoSettingsHttpType) UnmarshalJSON(data []byte) error { case "default": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Default", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfosettingsingest.go b/typedapi/types/nodeinfosettingsingest.go index e51f9ed1a5..9baf3d57d5 100644 --- a/typedapi/types/nodeinfosettingsingest.go +++ b/typedapi/types/nodeinfosettingsingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoSettingsIngest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L87-L122 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L87-L122 type NodeInfoSettingsIngest struct { Append *NodeInfoIngestInfo `json:"append,omitempty"` Attachment *NodeInfoIngestInfo `json:"attachment,omitempty"` diff --git a/typedapi/types/nodeinfosettingsnetwork.go b/typedapi/types/nodeinfosettingsnetwork.go index 346e7e69ad..5bd4c4af21 100644 --- a/typedapi/types/nodeinfosettingsnetwork.go +++ b/typedapi/types/nodeinfosettingsnetwork.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeInfoSettingsNetwork type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L220-L222 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L220-L222 type NodeInfoSettingsNetwork struct { Host string `json:"host"` } @@ -51,7 +52,7 @@ func (s *NodeInfoSettingsNetwork) UnmarshalJSON(data []byte) error { case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } } diff --git a/typedapi/types/nodeinfosettingsnode.go b/typedapi/types/nodeinfosettingsnode.go index 449dad8b08..6339d90125 100644 --- a/typedapi/types/nodeinfosettingsnode.go +++ b/typedapi/types/nodeinfosettingsnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSettingsNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L152-L156 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L152-L156 type NodeInfoSettingsNode struct { Attr map[string]json.RawMessage `json:"attr"` MaxLocalStorageNodes *string `json:"max_local_storage_nodes,omitempty"` @@ -57,13 +58,13 @@ func (s *NodeInfoSettingsNode) UnmarshalJSON(data []byte) error { s.Attr = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Attr); err != nil { - return err + return fmt.Errorf("%s | %w", "Attr", err) } case "max_local_storage_nodes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxLocalStorageNodes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *NodeInfoSettingsNode) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/nodeinfosettingstransport.go b/typedapi/types/nodeinfosettingstransport.go index e6c9f717b7..750d270516 100644 --- a/typedapi/types/nodeinfosettingstransport.go +++ b/typedapi/types/nodeinfosettingstransport.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSettingsTransport type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L205-L209 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L205-L209 type NodeInfoSettingsTransport struct { Features *NodeInfoSettingsTransportFeatures `json:"features,omitempty"` Type NodeInfoSettingsTransportType `json:"type"` @@ -54,18 +55,18 @@ func (s *NodeInfoSettingsTransport) UnmarshalJSON(data []byte) error { case "features": if err := dec.Decode(&s.Features); err != nil { - return err + return fmt.Errorf("%s | %w", "Features", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "type.default": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TypeDefault", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfosettingstransportfeatures.go b/typedapi/types/nodeinfosettingstransportfeatures.go index eb77f24bbd..f816f4fd46 100644 --- a/typedapi/types/nodeinfosettingstransportfeatures.go +++ b/typedapi/types/nodeinfosettingstransportfeatures.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSettingsTransportFeatures type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L216-L218 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L216-L218 type NodeInfoSettingsTransportFeatures struct { XPack string `json:"x-pack"` } @@ -53,7 +54,7 @@ func (s *NodeInfoSettingsTransportFeatures) UnmarshalJSON(data []byte) error { case "x-pack": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "XPack", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfosettingstransporttype.go b/typedapi/types/nodeinfosettingstransporttype.go index 325fe2d12e..9e7c77664e 100644 --- a/typedapi/types/nodeinfosettingstransporttype.go +++ b/typedapi/types/nodeinfosettingstransporttype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoSettingsTransportType type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L211-L214 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L211-L214 type NodeInfoSettingsTransportType struct { Default string `json:"default"` } @@ -62,7 +63,7 @@ func (s *NodeInfoSettingsTransportType) UnmarshalJSON(data []byte) error { case "default": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Default", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfotransport.go b/typedapi/types/nodeinfotransport.go index 02f44db9ee..213db47867 100644 --- a/typedapi/types/nodeinfotransport.go +++ b/typedapi/types/nodeinfotransport.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoTransport type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L350-L354 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L350-L354 type NodeInfoTransport struct { BoundAddress []string `json:"bound_address"` Profiles map[string]string `json:"profiles"` @@ -54,7 +55,7 @@ func (s *NodeInfoTransport) UnmarshalJSON(data []byte) error { case "bound_address": if err := dec.Decode(&s.BoundAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "BoundAddress", err) } case "profiles": @@ -62,13 +63,13 @@ func (s *NodeInfoTransport) UnmarshalJSON(data []byte) error { s.Profiles = make(map[string]string, 0) } if err := dec.Decode(&s.Profiles); err != nil { - return err + return fmt.Errorf("%s | %w", "Profiles", err) } case "publish_address": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PublishAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoxpack.go b/typedapi/types/nodeinfoxpack.go index 9051d544a6..9fced33180 100644 --- a/typedapi/types/nodeinfoxpack.go +++ b/typedapi/types/nodeinfoxpack.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // NodeInfoXpack type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L236-L240 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L236-L240 type NodeInfoXpack struct { License *NodeInfoXpackLicense `json:"license,omitempty"` Notification map[string]json.RawMessage `json:"notification,omitempty"` diff --git a/typedapi/types/nodeinfoxpacklicense.go b/typedapi/types/nodeinfoxpacklicense.go index d24935de18..f68b433da5 100644 --- a/typedapi/types/nodeinfoxpacklicense.go +++ b/typedapi/types/nodeinfoxpacklicense.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoXpackLicense type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L273-L275 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L273-L275 type NodeInfoXpackLicense struct { SelfGenerated NodeInfoXpackLicenseType `json:"self_generated"` } diff --git a/typedapi/types/nodeinfoxpacklicensetype.go b/typedapi/types/nodeinfoxpacklicensetype.go index ce659a041c..5f6a6d683b 100644 --- a/typedapi/types/nodeinfoxpacklicensetype.go +++ b/typedapi/types/nodeinfoxpacklicensetype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoXpackLicenseType type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L277-L279 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L277-L279 type NodeInfoXpackLicenseType struct { Type string `json:"type"` } @@ -53,7 +54,7 @@ func (s *NodeInfoXpackLicenseType) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoxpacksecurity.go b/typedapi/types/nodeinfoxpacksecurity.go index 63f63f719b..4579bff36d 100644 --- a/typedapi/types/nodeinfoxpacksecurity.go +++ b/typedapi/types/nodeinfoxpacksecurity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoXpackSecurity type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L242-L247 type NodeInfoXpackSecurity struct { Authc *NodeInfoXpackSecurityAuthc `json:"authc,omitempty"` Enabled string `json:"enabled"` @@ -55,13 +56,13 @@ func (s *NodeInfoXpackSecurity) UnmarshalJSON(data []byte) error { case "authc": if err := dec.Decode(&s.Authc); err != nil { - return err + return fmt.Errorf("%s | %w", "Authc", err) } case "enabled": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,12 +73,12 @@ func (s *NodeInfoXpackSecurity) UnmarshalJSON(data []byte) error { case "http": if err := dec.Decode(&s.Http); err != nil { - return err + return fmt.Errorf("%s | %w", "Http", err) } case "transport": if err := dec.Decode(&s.Transport); err != nil { - return err + return fmt.Errorf("%s | %w", "Transport", err) } } diff --git a/typedapi/types/nodeinfoxpacksecurityauthc.go b/typedapi/types/nodeinfoxpacksecurityauthc.go index 5a008090ea..3c6393fb79 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthc.go +++ b/typedapi/types/nodeinfoxpacksecurityauthc.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoXpackSecurityAuthc type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L253-L256 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L253-L256 type NodeInfoXpackSecurityAuthc struct { Realms NodeInfoXpackSecurityAuthcRealms `json:"realms"` Token NodeInfoXpackSecurityAuthcToken `json:"token"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthcrealms.go b/typedapi/types/nodeinfoxpacksecurityauthcrealms.go index 9627ed94ba..0582c26e86 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthcrealms.go +++ b/typedapi/types/nodeinfoxpacksecurityauthcrealms.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoXpackSecurityAuthcRealms type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L258-L262 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L258-L262 type NodeInfoXpackSecurityAuthcRealms struct { File map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"file,omitempty"` Native map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"native,omitempty"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go b/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go index fb4833f3fe..7ad6c7a6c6 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go +++ b/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoXpackSecurityAuthcRealmsStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L268-L271 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L268-L271 type NodeInfoXpackSecurityAuthcRealmsStatus struct { Enabled *string `json:"enabled,omitempty"` Order string `json:"order"` @@ -54,7 +55,7 @@ func (s *NodeInfoXpackSecurityAuthcRealmsStatus) UnmarshalJSON(data []byte) erro case "enabled": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *NodeInfoXpackSecurityAuthcRealmsStatus) UnmarshalJSON(data []byte) erro case "order": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoxpacksecurityauthctoken.go b/typedapi/types/nodeinfoxpacksecurityauthctoken.go index 068cab11a5..778d1ac85d 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthctoken.go +++ b/typedapi/types/nodeinfoxpacksecurityauthctoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeInfoXpackSecurityAuthcToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L264-L266 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L264-L266 type NodeInfoXpackSecurityAuthcToken struct { Enabled string `json:"enabled"` } @@ -53,7 +54,7 @@ func (s *NodeInfoXpackSecurityAuthcToken) UnmarshalJSON(data []byte) error { case "enabled": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeinfoxpacksecurityssl.go b/typedapi/types/nodeinfoxpacksecurityssl.go index eb70d61e8d..4d95494037 100644 --- a/typedapi/types/nodeinfoxpacksecurityssl.go +++ b/typedapi/types/nodeinfoxpacksecurityssl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodeInfoXpackSecuritySsl type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L249-L251 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L249-L251 type NodeInfoXpackSecuritySsl struct { Ssl map[string]string `json:"ssl"` } diff --git a/typedapi/types/nodejvminfo.go b/typedapi/types/nodejvminfo.go index 001b184c5e..641b8b61aa 100644 --- a/typedapi/types/nodejvminfo.go +++ b/typedapi/types/nodejvminfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeJvmInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L356-L370 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L356-L370 type NodeJvmInfo struct { GcCollectors []string `json:"gc_collectors"` InputArguments []string `json:"input_arguments"` @@ -63,22 +64,22 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case "gc_collectors": if err := dec.Decode(&s.GcCollectors); err != nil { - return err + return fmt.Errorf("%s | %w", "GcCollectors", err) } case "input_arguments": if err := dec.Decode(&s.InputArguments); err != nil { - return err + return fmt.Errorf("%s | %w", "InputArguments", err) } case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "memory_pools": if err := dec.Decode(&s.MemoryPools); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryPools", err) } case "pid": @@ -89,7 +90,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Pid", err) } s.Pid = value case float64: @@ -99,7 +100,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "using_bundled_jdk", "bundled_jdk": @@ -109,7 +110,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsingBundledJdk", err) } s.UsingBundledJdk = value case bool: @@ -119,7 +120,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case "using_compressed_ordinary_object_pointers": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UsingCompressedOrdinaryObjectPointers", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,18 +131,18 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "vm_name": if err := dec.Decode(&s.VmName); err != nil { - return err + return fmt.Errorf("%s | %w", "VmName", err) } case "vm_vendor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + 
return fmt.Errorf("%s | %w", "VmVendor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -152,7 +153,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case "vm_version": if err := dec.Decode(&s.VmVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "VmVersion", err) } } diff --git a/typedapi/types/nodeoperatingsysteminfo.go b/typedapi/types/nodeoperatingsysteminfo.go index 187b630034..52fe13aa88 100644 --- a/typedapi/types/nodeoperatingsysteminfo.go +++ b/typedapi/types/nodeoperatingsysteminfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeOperatingSystemInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L372-L389 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L372-L389 type NodeOperatingSystemInfo struct { // AllocatedProcessors The number of processors actually used to calculate thread pool size. This // number can be set with the node.processors setting of a node and defaults to @@ -75,7 +76,7 @@ func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllocatedProcessors", err) } s.AllocatedProcessors = &value case float64: @@ -86,7 +87,7 @@ func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { case "arch": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Arch", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,7 +104,7 @@ func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AvailableProcessors", err) } s.AvailableProcessors = value case float64: @@ -113,37 +114,37 @@ func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { case "cpu": if err := dec.Decode(&s.Cpu); err != nil { - return err + return fmt.Errorf("%s | %w", "Cpu", err) } case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "pretty_name": if err := dec.Decode(&s.PrettyName); err != nil { - return err + return fmt.Errorf("%s | %w", "PrettyName", err) } case "refresh_interval_in_millis": if err := dec.Decode(&s.RefreshIntervalInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshIntervalInMillis", err) } case "swap": if err := dec.Decode(&s.Swap); err != nil { - return err + return fmt.Errorf("%s | %w", "Swap", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/nodepackagingtype.go b/typedapi/types/nodepackagingtype.go index 0399e000dd..7f52b0b2c0 100644 --- a/typedapi/types/nodepackagingtype.go +++ b/typedapi/types/nodepackagingtype.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodePackagingType type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L526-L539 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L526-L539 type NodePackagingType struct { // Count Number of selected nodes using the distribution flavor and file type. Count int `json:"count"` @@ -63,7 +64,7 @@ func (s *NodePackagingType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -74,7 +75,7 @@ func (s *NodePackagingType) UnmarshalJSON(data []byte) error { case "flavor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flavor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *NodePackagingType) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeprocessinfo.go b/typedapi/types/nodeprocessinfo.go index f16d1d881c..214dd5e823 100644 --- a/typedapi/types/nodeprocessinfo.go +++ b/typedapi/types/nodeprocessinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeProcessInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L391-L398 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L391-L398 type NodeProcessInfo struct { // Id Process identifier (PID) Id int64 `json:"id"` @@ -62,7 +63,7 @@ func (s *NodeProcessInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } s.Id = value case float64: @@ -77,7 +78,7 @@ func (s *NodeProcessInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Mlockall", err) } s.Mlockall = value case bool: @@ -86,7 +87,7 @@ func (s *NodeProcessInfo) UnmarshalJSON(data []byte) error { case "refresh_interval_in_millis": if err := dec.Decode(&s.RefreshIntervalInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshIntervalInMillis", err) } } diff --git a/typedapi/types/nodereloaderror.go b/typedapi/types/nodereloaderror.go index f4a8d14d16..aebb1fd0d6 100644 --- a/typedapi/types/nodereloaderror.go +++ b/typedapi/types/nodereloaderror.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeReloadError type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/NodeReloadResult.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/NodeReloadResult.ts#L24-L27 type NodeReloadError struct { Name string `json:"name"` ReloadException *ErrorCause `json:"reload_exception,omitempty"` @@ -52,12 +53,12 @@ func (s *NodeReloadError) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "reload_exception": if err := dec.Decode(&s.ReloadException); err != nil { - return err + return fmt.Errorf("%s | %w", "ReloadException", err) } } diff --git a/typedapi/types/nodereloadresult.go b/typedapi/types/nodereloadresult.go index cc6b0acc23..d43b9cfaec 100644 --- a/typedapi/types/nodereloadresult.go +++ b/typedapi/types/nodereloadresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // Stats // NodeReloadError // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/NodeReloadResult.ts#L29-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/NodeReloadResult.ts#L29-L30 type NodeReloadResult interface{} diff --git a/typedapi/types/nodescontext.go b/typedapi/types/nodescontext.go index ffc223f02a..2b792fe76f 100644 --- a/typedapi/types/nodescontext.go +++ b/typedapi/types/nodescontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodesContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L997-L1002 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L997-L1002 type NodesContext struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` @@ -60,7 +61,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheEvictions", err) } s.CacheEvictions = &value case float64: @@ -75,7 +76,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CompilationLimitTriggered", err) } s.CompilationLimitTriggered = &value case float64: @@ -90,7 +91,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compilations", err) } s.Compilations = &value case float64: @@ -101,7 +102,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { case "context": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Context", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodescredentials.go b/typedapi/types/nodescredentials.go index 8c1ccc9e8c..f45506ce97 100644 --- a/typedapi/types/nodescredentials.go +++ b/typedapi/types/nodescredentials.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodesCredentials type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_service_credentials/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_service_credentials/types.ts#L23-L28 type NodesCredentials struct { // FileTokens File-backed tokens collected from all nodes FileTokens map[string]NodesCredentialsFileToken `json:"file_tokens"` diff --git a/typedapi/types/nodescredentialsfiletoken.go b/typedapi/types/nodescredentialsfiletoken.go index 33e9214ca9..8ea42c6b83 100644 --- a/typedapi/types/nodescredentialsfiletoken.go +++ b/typedapi/types/nodescredentialsfiletoken.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodesCredentialsFileToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_service_credentials/types.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_service_credentials/types.ts#L30-L32 type NodesCredentialsFileToken struct { Nodes []string `json:"nodes"` } diff --git a/typedapi/types/nodeshard.go b/typedapi/types/nodeshard.go index 06951e8ed0..3563b39af3 100644 --- a/typedapi/types/nodeshard.go +++ b/typedapi/types/nodeshard.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NodeShard type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Node.ts#L60-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Node.ts#L60-L71 type NodeShard struct { AllocationId map[string]string `json:"allocation_id,omitempty"` Index string `json:"index"` @@ -66,17 +67,17 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { s.AllocationId = make(map[string]string, 0) } if err := dec.Decode(&s.AllocationId); err != nil { - return err + return fmt.Errorf("%s | %w", "AllocationId", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "primary": @@ -86,7 +87,7 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Primary", err) } s.Primary = value case bool: @@ -98,17 +99,17 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { s.RecoverySource = make(map[string]string, 0) } if err := dec.Decode(&s.RecoverySource); err != nil { - return err + return fmt.Errorf("%s | %w", "RecoverySource", err) } case "relocating_node": if err := dec.Decode(&s.RelocatingNode); err != nil { - return err + return fmt.Errorf("%s | %w", "RelocatingNode", err) } case "relocation_failure_info": if err := dec.Decode(&s.RelocationFailureInfo); err != nil { - return err + return fmt.Errorf("%s | %w", "RelocationFailureInfo", err) } case "shard": @@ -119,7 +120,7 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: @@ -129,12 +130,12 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "unassigned_info": if err := dec.Decode(&s.UnassignedInfo); err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedInfo", err) } } diff --git a/typedapi/types/nodeshutdownstatus.go b/typedapi/types/nodeshutdownstatus.go index 278281a149..68d1fcb874 100644 --- a/typedapi/types/nodeshutdownstatus.go +++ b/typedapi/types/nodeshutdownstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // NodeShutdownStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L29-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L29-L38 type NodeShutdownStatus struct { NodeId string `json:"node_id"` PersistentTasks PersistentTaskStatus `json:"persistent_tasks"` @@ -62,23 +63,23 @@ func (s *NodeShutdownStatus) UnmarshalJSON(data []byte) error { case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "persistent_tasks": if err := dec.Decode(&s.PersistentTasks); err != nil { - return err + return fmt.Errorf("%s | %w", "PersistentTasks", err) } case "plugins": if err := dec.Decode(&s.Plugins); err != nil { - return err + return fmt.Errorf("%s | %w", "Plugins", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,22 +90,22 @@ func (s *NodeShutdownStatus) UnmarshalJSON(data []byte) error { case "shard_migration": if err := dec.Decode(&s.ShardMigration); err != nil { - return err + return fmt.Errorf("%s | %w", "ShardMigration", err) } case "shutdown_startedmillis": if err := dec.Decode(&s.ShutdownStartedmillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ShutdownStartedmillis", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/nodesindexingpressure.go b/typedapi/types/nodesindexingpressure.go index 25b2398ed3..34043b47fa 100644 --- a/typedapi/types/nodesindexingpressure.go +++ b/typedapi/types/nodesindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodesIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L116-L121 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L116-L121 type NodesIndexingPressure struct { // Memory Contains statistics for memory consumption from indexing load. Memory *NodesIndexingPressureMemory `json:"memory,omitempty"` diff --git a/typedapi/types/nodesindexingpressurememory.go b/typedapi/types/nodesindexingpressurememory.go index 9003dfd217..d347ed32d4 100644 --- a/typedapi/types/nodesindexingpressurememory.go +++ b/typedapi/types/nodesindexingpressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodesIndexingPressureMemory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L123-L142 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L123-L142 type NodesIndexingPressureMemory struct { // Current Contains statistics for current indexing load. Current *PressureMemory `json:"current,omitempty"` @@ -61,12 +62,12 @@ func (s *NodesIndexingPressureMemory) UnmarshalJSON(data []byte) error { case "current": if err := dec.Decode(&s.Current); err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } case "limit": if err := dec.Decode(&s.Limit); err != nil { - return err + return fmt.Errorf("%s | %w", "Limit", err) } case "limit_in_bytes": @@ -76,7 +77,7 @@ func (s *NodesIndexingPressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LimitInBytes", err) } s.LimitInBytes = &value case float64: @@ -86,7 +87,7 @@ func (s *NodesIndexingPressureMemory) UnmarshalJSON(data []byte) error { case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } } diff --git a/typedapi/types/nodesingest.go b/typedapi/types/nodesingest.go index 70026d9793..88a0fa5189 100644 --- a/typedapi/types/nodesingest.go +++ b/typedapi/types/nodesingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // NodesIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L345-L354 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L345-L354 type NodesIngest struct { // Pipelines Contains statistics about ingest pipelines for the node. Pipelines map[string]IngestTotal `json:"pipelines,omitempty"` diff --git a/typedapi/types/nodesrecord.go b/typedapi/types/nodesrecord.go index b3058f0775..7597548393 100644 --- a/typedapi/types/nodesrecord.go +++ b/typedapi/types/nodesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/nodes/types.ts#L23-L542 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/nodes/types.ts#L23-L542 type NodesRecord struct { // Build The Elasticsearch build hash. 
Build *string `json:"build,omitempty"` @@ -251,7 +252,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "build", "b": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Build", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -263,7 +264,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkAvgSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -275,7 +276,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "bulk.avg_time", "bati", "bulkAvgTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkAvgTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -287,7 +288,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "bulk.total_operations", "bto", "bulkTotalOperations": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalOperations", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -299,7 +300,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "bulk.total_size_in_bytes", "btsi", "bulkTotalSizeInBytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -311,7 +312,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "bulk.total_time", "btti", "bulkTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -323,7 +324,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "completion.size", "cs", "completionSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CompletionSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -335,7 +336,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "cpu": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Cpu", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -346,28 +347,28 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "disk.avail", "d", "da", "disk", "diskAvail": if err := dec.Decode(&s.DiskAvail); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskAvail", err) } case "disk.total", "dt", "diskTotal": if err := dec.Decode(&s.DiskTotal); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskTotal", err) } case "disk.used", "du", "diskUsed": if err := dec.Decode(&s.DiskUsed); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskUsed", err) } case "disk.used_percent", "dup", "diskUsedPercent": if err := dec.Decode(&s.DiskUsedPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "DiskUsedPercent", err) } case "fielddata.evictions", "fe", "fielddataEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -379,7 +380,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "fielddata.memory_size", "fm", "fielddataMemory": 
var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -391,7 +392,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "file_desc.current", "fdc", "fileDescriptorCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FileDescCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -403,7 +404,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "file_desc.max", "fdm", "fileDescriptorMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FileDescMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -414,13 +415,13 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "file_desc.percent", "fdp", "fileDescriptorPercent": if err := dec.Decode(&s.FileDescPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "FileDescPercent", err) } case "flavor", "f": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flavor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -432,7 +433,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "flush.total", "ft", "flushTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -444,7 +445,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "flush.total_time", "ftt", "flushTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -456,7 +457,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.current", "gc", "getCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -468,7 +469,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.exists_time", "geti", "getExistsTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetExistsTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -480,7 +481,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.exists_total", "geto", "getExistsTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetExistsTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -492,7 +493,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.missing_time", "gmti", "getMissingTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetMissingTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -504,7 +505,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.missing_total", "gmto", "getMissingTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetMissingTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -516,7 +517,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.time", "gti", "getTime": var tmp json.RawMessage if err := 
dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -528,7 +529,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "get.total", "gto", "getTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -540,7 +541,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "heap.current", "hc", "heapCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -552,7 +553,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "heap.max", "hm", "heapMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -563,13 +564,13 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "heap.percent", "hp", "heapPercent": if err := dec.Decode(&s.HeapPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapPercent", err) } case "http_address", "http": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HttpAddress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -580,13 +581,13 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "id", "nodeId": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "indexing.delete_current", "idc", "indexingDeleteCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -598,7 +599,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_time", "idti", "indexingDeleteTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -610,7 +611,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_total", "idto", "indexingDeleteTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -622,7 +623,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_current", "iic", "indexingIndexCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -634,7 +635,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_failed", "iif", "indexingIndexFailed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexFailed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -646,7 +647,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_time", "iiti", "indexingIndexTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -658,7 +659,7 @@ func 
(s *NodesRecord) UnmarshalJSON(data []byte) error { case "indexing.index_total", "iito", "indexingIndexTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -670,7 +671,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "ip", "i": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -682,7 +683,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "jdk", "j": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Jdk", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -694,7 +695,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "load_15m", "l": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Load15M", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -706,7 +707,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "load_1m": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Load1M", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -718,7 +719,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "load_5m": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Load5M", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -730,7 +731,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "master", "m": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Master", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -742,7 +743,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "merges.current", "mc", "mergesCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -754,7 +755,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "merges.current_docs", "mcd", "mergesCurrentDocs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrentDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -766,7 +767,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "merges.current_size", "mcs", "mergesCurrentSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrentSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -778,7 +779,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "merges.total", "mt", "mergesTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -790,7 +791,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "merges.total_docs", "mtd", "mergesTotalDocs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -802,7 +803,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case 
"merges.total_size", "mts", "mergesTotalSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -814,7 +815,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "merges.total_time", "mtt", "mergesTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -825,13 +826,13 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "name", "n": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "node.role", "r", "role", "nodeRole": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeRole", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -843,7 +844,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "pid", "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -855,7 +856,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "port", "po": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -867,7 +868,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "query_cache.evictions", "qce", "queryCacheEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -879,7 +880,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "query_cache.hit_count", "qchc", "queryCacheHitCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheHitCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -891,7 +892,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "query_cache.memory_size", "qcm", "queryCacheMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -903,7 +904,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "query_cache.miss_count", "qcmc", "queryCacheMissCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheMissCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -915,7 +916,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "ram.current", "rc", "ramCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RamCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -927,7 +928,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "ram.max", "rn", "ramMax": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RamMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -938,13 +939,13 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "ram.percent", "rp", "ramPercent": if err := dec.Decode(&s.RamPercent); err != nil { - return err + 
return fmt.Errorf("%s | %w", "RamPercent", err) } case "refresh.external_time", "rti", "refreshTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshExternalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -956,7 +957,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "refresh.external_total", "rto", "refreshTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshExternalTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -968,7 +969,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "refresh.listeners", "rli", "refreshListeners": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshListeners", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -980,7 +981,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "refresh.time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -992,7 +993,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "refresh.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1004,7 +1005,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "request_cache.evictions", "rce", "requestCacheEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1016,7 +1017,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "request_cache.hit_count", "rchc", "requestCacheHitCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheHitCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1028,7 +1029,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "request_cache.memory_size", "rcm", "requestCacheMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1040,7 +1041,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "request_cache.miss_count", "rcmc", "requestCacheMissCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCacheMissCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1052,7 +1053,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "script.cache_evictions", "scrce", "scriptCacheEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1064,7 +1065,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "script.compilation_limit_triggered", "scrclt", "scriptCacheCompilationLimitTriggered": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptCompilationLimitTriggered", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1076,7 +1077,7 @@ func (s *NodesRecord) 
UnmarshalJSON(data []byte) error { case "script.compilations", "scrcc", "scriptCompilations": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptCompilations", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1088,7 +1089,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.fetch_current", "sfc", "searchFetchCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1100,7 +1101,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.fetch_time", "sfti", "searchFetchTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1112,7 +1113,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.fetch_total", "sfto", "searchFetchTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1124,7 +1125,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.open_contexts", "so", "searchOpenContexts": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchOpenContexts", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1136,7 +1137,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.query_current", "sqc", "searchQueryCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1148,7 +1149,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.query_time", "sqti", "searchQueryTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1160,7 +1161,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.query_total", "sqto", "searchQueryTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1172,7 +1173,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.scroll_current", "scc", "searchScrollCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1184,7 +1185,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.scroll_time", "scti", "searchScrollTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1196,7 +1197,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "search.scroll_total", "scto", "searchScrollTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1208,7 +1209,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) 
error { case "segments.count", "sc", "segmentsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1220,7 +1221,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "segments.fixed_bitset_memory", "sfbm", "fixedBitsetMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsFixedBitsetMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1232,7 +1233,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "segments.index_writer_memory", "siwm", "segmentsIndexWriterMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsIndexWriterMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1244,7 +1245,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "segments.memory", "sm", "segmentsMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1256,7 +1257,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "segments.version_map_memory", "svmm", "segmentsVersionMapMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsVersionMapMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1268,7 +1269,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "suggest.current", "suc", "suggestCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1280,7 +1281,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "suggest.time", "suti", "suggestTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1292,7 +1293,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "suggest.total", "suto", "suggestTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1304,7 +1305,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "type", "t": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1316,7 +1317,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "uptime", "u": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Uptime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1327,7 +1328,7 @@ func (s *NodesRecord) UnmarshalJSON(data []byte) error { case "version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/nodestatistics.go b/typedapi/types/nodestatistics.go index 06c1968cdc..5c6cfe4d84 100644 --- a/typedapi/types/nodestatistics.go +++ b/typedapi/types/nodestatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
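The hunks above all follow one pattern: every decode failure inside a generated UnmarshalJSON is now wrapped as fmt.Errorf("%s | %w", "<FieldName>", err) instead of being returned bare. Because %w is used, callers still reach the underlying error with errors.Is/errors.As while the message gains the field that failed. The following is a minimal, self-contained sketch of that pattern, not the generated client code; the nodeStats type and its single field are illustrative stand-ins.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

// nodeStats mimics the shape of a generated type; the real types live in
// typedapi/types and handle many more fields.
type nodeStats struct {
	Failed int
}

func (s *nodeStats) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["failed"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return fmt.Errorf("%s | %w", "Failed", err)
		}
		switch v := tmp.(type) {
		case string:
			// Field name first, original error preserved via %w.
			n, err := strconv.Atoi(v)
			if err != nil {
				return fmt.Errorf("%s | %w", "Failed", err)
			}
			s.Failed = n
		case float64:
			s.Failed = int(v)
		}
	}
	return nil
}

func main() {
	var s nodeStats
	err := json.Unmarshal([]byte(`{"failed":"not-a-number"}`), &s)
	fmt.Println(err) // Failed | strconv.Atoi: parsing "not-a-number": invalid syntax

	// The wrapped error is still programmatically inspectable.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true
}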
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Node.ts#L28-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Node.ts#L28-L39 type NodeStatistics struct { // Failed Number of nodes that rejected the request or failed to respond. If this value // is not 0, a reason for the rejection or failure is included in the response. @@ -65,7 +66,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = value case float64: @@ -75,7 +76,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case "failures": if err := dec.Decode(&s.Failures); err != nil { - return err + return fmt.Errorf("%s | %w", "Failures", err) } case "successful": @@ -86,7 +87,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Successful", err) } s.Successful = value case float64: @@ -102,7 +103,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/nodetasks.go b/typedapi/types/nodetasks.go index c221cb110e..e9f052adac 100644 --- a/typedapi/types/nodetasks.go +++ b/typedapi/types/nodetasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeTasks type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/_types/TaskListResponseBase.ts#L49-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/_types/TaskListResponseBase.ts#L49-L57 type NodeTasks struct { Attributes map[string]string `json:"attributes,omitempty"` Host *string `json:"host,omitempty"` @@ -60,27 +61,27 @@ func (s *NodeTasks) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "tasks": @@ -88,12 +89,12 @@ func (s *NodeTasks) UnmarshalJSON(data []byte) error { s.Tasks = make(map[string]TaskInfo, 0) } if err := dec.Decode(&s.Tasks); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/nodethreadpoolinfo.go b/typedapi/types/nodethreadpoolinfo.go index 3f5a7038bb..e2b79407e1 100644 --- a/typedapi/types/nodethreadpoolinfo.go +++ b/typedapi/types/nodethreadpoolinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // NodeThreadPoolInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/info/types.ts#L294-L301 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/info/types.ts#L294-L301 type NodeThreadPoolInfo struct { Core *int `json:"core,omitempty"` KeepAlive Duration `json:"keep_alive,omitempty"` @@ -63,7 +64,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Core", err) } s.Core = &value case float64: @@ -73,7 +74,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case "keep_alive": if err := dec.Decode(&s.KeepAlive); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepAlive", err) } case "max": @@ -84,7 +85,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } s.Max = &value case float64: @@ -100,7 +101,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueSize", err) } s.QueueSize = value case float64: @@ -116,7 +117,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -127,7 +128,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/nodeusage.go b/typedapi/types/nodeusage.go index 4331f5a2d9..31eefbee9e 100644 --- a/typedapi/types/nodeusage.go +++ b/typedapi/types/nodeusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NodeUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/usage/types.ts#L25-L30 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/usage/types.ts#L25-L30 type NodeUsage struct { Aggregations map[string]json.RawMessage `json:"aggregations"` RestActions map[string]int `json:"rest_actions"` @@ -57,7 +58,7 @@ func (s *NodeUsage) UnmarshalJSON(data []byte) error { s.Aggregations = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "rest_actions": @@ -65,17 +66,17 @@ func (s *NodeUsage) UnmarshalJSON(data []byte) error { s.RestActions = make(map[string]int, 0) } if err := dec.Decode(&s.RestActions); err != nil { - return err + return fmt.Errorf("%s | %w", "RestActions", err) } case "since": if err := dec.Decode(&s.Since); err != nil { - return err + return fmt.Errorf("%s | %w", "Since", err) } case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/norianalyzer.go b/typedapi/types/norianalyzer.go index 2d3c0b0aae..eedc1de773 100644 --- a/typedapi/types/norianalyzer.go +++ b/typedapi/types/norianalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NoriAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L66-L72 type NoriAnalyzer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` Stoptags []string `json:"stoptags,omitempty"` @@ -58,23 +59,23 @@ func (s *NoriAnalyzer) UnmarshalJSON(data []byte) error { case "decompound_mode": if err := dec.Decode(&s.DecompoundMode); err != nil { - return err + return fmt.Errorf("%s | %w", "DecompoundMode", err) } case "stoptags": if err := dec.Decode(&s.Stoptags); err != nil { - return err + return fmt.Errorf("%s | %w", "Stoptags", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "user_dictionary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UserDictionary", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,7 +86,7 @@ func (s *NoriAnalyzer) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/noripartofspeechtokenfilter.go b/typedapi/types/noripartofspeechtokenfilter.go index 727be7c2e8..45fa43a40a 100644 --- a/typedapi/types/noripartofspeechtokenfilter.go +++ b/typedapi/types/noripartofspeechtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // NoriPartOfSpeechTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L273-L276 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L273-L276 type NoriPartOfSpeechTokenFilter struct { Stoptags []string `json:"stoptags,omitempty"` Type string `json:"type,omitempty"` @@ -53,17 +54,17 @@ func (s *NoriPartOfSpeechTokenFilter) UnmarshalJSON(data []byte) error { case "stoptags": if err := dec.Decode(&s.Stoptags); err != nil { - return err + return fmt.Errorf("%s | %w", "Stoptags", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/noritokenizer.go b/typedapi/types/noritokenizer.go index edb1c6b0e5..ce5704063b 100644 --- a/typedapi/types/noritokenizer.go +++ b/typedapi/types/noritokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NoriTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L81-L87 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L81-L87 type NoriTokenizer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` @@ -59,7 +60,7 @@ func (s *NoriTokenizer) UnmarshalJSON(data []byte) error { case "decompound_mode": if err := dec.Decode(&s.DecompoundMode); err != nil { - return err + return fmt.Errorf("%s | %w", "DecompoundMode", err) } case "discard_punctuation": @@ -69,7 +70,7 @@ func (s *NoriTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiscardPunctuation", err) } s.DiscardPunctuation = &value case bool: @@ -78,13 +79,13 @@ func (s *NoriTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "user_dictionary": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UserDictionary", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,12 +96,12 @@ func (s *NoriTokenizer) UnmarshalJSON(data []byte) error { case "user_dictionary_rules": if err := dec.Decode(&s.UserDictionaryRules); err != nil { - return err + return fmt.Errorf("%s | %w", "UserDictionaryRules", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/normalizeaggregation.go b/typedapi/types/normalizeaggregation.go index 50a7ffc609..6f45fca12a 100644 --- a/typedapi/types/normalizeaggregation.go +++ b/typedapi/types/normalizeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // NormalizeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L319-L324 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L319-L324 type NormalizeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -66,13 +67,13 @@ func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,23 +84,23 @@ func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "method": if err := dec.Decode(&s.Method); err != nil { - return err + return fmt.Errorf("%s | %w", "Method", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/normalizer.go b/typedapi/types/normalizer.go index ae0986a988..8b735c583a 100644 --- a/typedapi/types/normalizer.go +++ b/typedapi/types/normalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // LowercaseNormalizer // CustomNormalizer // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/normalizers.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/normalizers.ts#L20-L24 type Normalizer interface{} diff --git a/typedapi/types/numberrangequery.go b/typedapi/types/numberrangequery.go index 85ae068011..1e1f5a53d1 100644 --- a/typedapi/types/numberrangequery.go +++ b/typedapi/types/numberrangequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // NumberRangeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L145-L164 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L145-L164 type NumberRangeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -77,7 +78,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case "from": if err := dec.Decode(&s.From); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } case "gt": @@ -98,7 +99,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gt", err) } f := Float64(value) s.Gt = &f @@ -114,7 +115,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Gte", err) } f := Float64(value) s.Gte = &f @@ -130,7 +131,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lt", err) } f := Float64(value) s.Lt = &f @@ -146,7 +147,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lte", err) } f := Float64(value) s.Lte = &f @@ -158,7 +159,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -169,12 +170,12 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { case "relation": if err := dec.Decode(&s.Relation); err != nil { - return err + return fmt.Errorf("%s | %w", "Relation", err) } case "to": if err := dec.Decode(&s.To); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } } diff --git a/typedapi/types/numericdecayfunction.go b/typedapi/types/numericdecayfunction.go index 96c3c20f93..f1baad1ec7 100644 --- a/typedapi/types/numericdecayfunction.go +++ b/typedapi/types/numericdecayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -29,7 +29,7 @@ import ( // NumericDecayFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L182-L184 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L182-L184 type NumericDecayFunction struct { // MultiValueMode Determines how the distance is calculated when a field used for computing the // decay contains multiple values. diff --git a/typedapi/types/numericfielddata.go b/typedapi/types/numericfielddata.go index 075d1a4793..46902a8e47 100644 --- a/typedapi/types/numericfielddata.go +++ b/typedapi/types/numericfielddata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // NumericFielddata type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/NumericFielddata.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/NumericFielddata.ts#L22-L24 type NumericFielddata struct { Format numericfielddataformat.NumericFielddataFormat `json:"format"` } diff --git a/typedapi/types/objectproperty.go b/typedapi/types/objectproperty.go index 3ebe13dd40..f8a00bec93 100644 --- a/typedapi/types/objectproperty.go +++ b/typedapi/types/objectproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ObjectProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/complex.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/complex.ts#L46-L50 type ObjectProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -44,6 +45,7 @@ type ObjectProperty struct { Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` + Subobjects *bool `json:"subobjects,omitempty"` Type string `json:"type,omitempty"` } @@ -68,19 +70,19 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "enabled": @@ -90,7 +92,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -412,7 +414,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -425,7 +427,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -738,7 +740,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := 
dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -754,16 +756,30 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: s.Store = &v } + case "subobjects": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Subobjects", err) + } + s.Subobjects = &value + case bool: + s.Subobjects = &v + } + case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } @@ -784,6 +800,7 @@ func (s ObjectProperty) MarshalJSON() ([]byte, error) { Properties: s.Properties, Similarity: s.Similarity, Store: s.Store, + Subobjects: s.Subobjects, Type: s.Type, } diff --git a/typedapi/types/onehotencodingpreprocessor.go b/typedapi/types/onehotencodingpreprocessor.go index 1ec03fcdda..9269c30858 100644 --- a/typedapi/types/onehotencodingpreprocessor.go +++ b/typedapi/types/onehotencodingpreprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // OneHotEncodingPreprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L44-L47 type OneHotEncodingPreprocessor struct { Field string `json:"field"` HotMap map[string]string `json:"hot_map"` @@ -54,7 +55,7 @@ func (s *OneHotEncodingPreprocessor) UnmarshalJSON(data []byte) error { case "field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *OneHotEncodingPreprocessor) UnmarshalJSON(data []byte) error { s.HotMap = make(map[string]string, 0) } if err := dec.Decode(&s.HotMap); err != nil { - return err + return fmt.Errorf("%s | %w", "HotMap", err) } } diff --git a/typedapi/types/operatingsystem.go b/typedapi/types/operatingsystem.go index bacd16e93b..fc2b666261 100644 --- a/typedapi/types/operatingsystem.go +++ b/typedapi/types/operatingsystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // OperatingSystem type. 
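Besides the error-wrapping changes, the ObjectProperty hunk above adds a new optional Subobjects field and a "subobjects" case that accepts either a JSON boolean or the strings "true"/"false", normalizing both into a *bool. The sketch below reproduces that lenient handling under illustrative names; it is not the generated ObjectProperty type.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type objectMapping struct {
	Subobjects *bool
}

func (s *objectMapping) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["subobjects"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return fmt.Errorf("%s | %w", "Subobjects", err)
		}
		switch v := tmp.(type) {
		case string: // e.g. "false"
			value, err := strconv.ParseBool(v)
			if err != nil {
				return fmt.Errorf("%s | %w", "Subobjects", err)
			}
			s.Subobjects = &value
		case bool: // e.g. false
			s.Subobjects = &v
		}
	}
	return nil
}

func main() {
	for _, doc := range []string{`{"subobjects":false}`, `{"subobjects":"false"}`} {
		var m objectMapping
		if err := json.Unmarshal([]byte(doc), &m); err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Println(*m.Subobjects) // false in both cases
	}
}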
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L945-L951 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L945-L951 type OperatingSystem struct { Cgroup *Cgroup `json:"cgroup,omitempty"` Cpu *Cpu `json:"cpu,omitempty"` @@ -56,22 +57,22 @@ func (s *OperatingSystem) UnmarshalJSON(data []byte) error { case "cgroup": if err := dec.Decode(&s.Cgroup); err != nil { - return err + return fmt.Errorf("%s | %w", "Cgroup", err) } case "cpu": if err := dec.Decode(&s.Cpu); err != nil { - return err + return fmt.Errorf("%s | %w", "Cpu", err) } case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "swap": if err := dec.Decode(&s.Swap); err != nil { - return err + return fmt.Errorf("%s | %w", "Swap", err) } case "timestamp": @@ -81,7 +82,7 @@ func (s *OperatingSystem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = &value case float64: diff --git a/typedapi/types/operatingsystemmemoryinfo.go b/typedapi/types/operatingsystemmemoryinfo.go index 2034fa1165..e5c141c3ce 100644 --- a/typedapi/types/operatingsystemmemoryinfo.go +++ b/typedapi/types/operatingsystemmemoryinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // OperatingSystemMemoryInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/stats/types.ts#L541-L568 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/stats/types.ts#L541-L568 type OperatingSystemMemoryInfo struct { // AdjustedTotalInBytes Total amount, in bytes, of memory across all selected nodes, but using the // value specified using the `es.total_memory_bytes` system property instead of @@ -70,7 +71,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AdjustedTotalInBytes", err) } s.AdjustedTotalInBytes = &value case float64: @@ -85,7 +86,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreeInBytes", err) } s.FreeInBytes = value case float64: @@ -101,7 +102,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FreePercent", err) } s.FreePercent = value case float64: @@ -116,7 +117,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } s.TotalInBytes = value case float64: @@ -131,7 +132,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedInBytes", err) } s.UsedInBytes = value case float64: @@ -147,7 +148,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedPercent", err) } s.UsedPercent = value case float64: diff --git a/typedapi/types/operationcontainer.go b/typedapi/types/operationcontainer.go index ea216d3040..42951c1c2c 100644 --- a/typedapi/types/operationcontainer.go +++ b/typedapi/types/operationcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // OperationContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L145-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L145-L167 type OperationContainer struct { // Create Indexes the specified document if it does not already exist. // The following line must contain the source data to be indexed. diff --git a/typedapi/types/outlierdetectionparameters.go b/typedapi/types/outlierdetectionparameters.go index 57c7610f92..0de1056312 100644 --- a/typedapi/types/outlierdetectionparameters.go +++ b/typedapi/types/outlierdetectionparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // OutlierDetectionParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L527-L561 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L527-L561 type OutlierDetectionParameters struct { // ComputeFeatureInfluence Specifies whether the feature influence calculation is enabled. ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` @@ -85,7 +86,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ComputeFeatureInfluence", err) } s.ComputeFeatureInfluence = &value case bool: @@ -99,7 +100,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureInfluenceThreshold", err) } f := Float64(value) s.FeatureInfluenceThreshold = &f @@ -111,7 +112,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case "method": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Method", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NNeighbors", err) } s.NNeighbors = &value case float64: @@ -143,7 +144,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutlierFraction", err) } f := Float64(value) s.OutlierFraction = &f @@ -159,7 +160,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StandardizationEnabled", err) } s.StandardizationEnabled = &value case bool: diff --git a/typedapi/types/overallbucket.go b/typedapi/types/overallbucket.go index adb8c74db0..3a7d0d1a45 100644 --- a/typedapi/types/overallbucket.go +++ b/typedapi/types/overallbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // OverallBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Bucket.ts#L130-L145 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Bucket.ts#L130-L145 type OverallBucket struct { // BucketSpan The length of the bucket in seconds. 
Matches the job with the longest // bucket_span value. @@ -67,7 +68,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { case "bucket_span": if err := dec.Decode(&s.BucketSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketSpan", err) } case "is_interim": @@ -77,7 +78,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsInterim", err) } s.IsInterim = value case bool: @@ -86,7 +87,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { case "jobs": if err := dec.Decode(&s.Jobs); err != nil { - return err + return fmt.Errorf("%s | %w", "Jobs", err) } case "overall_score": @@ -96,7 +97,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OverallScore", err) } f := Float64(value) s.OverallScore = f @@ -108,7 +109,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { case "result_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -119,12 +120,12 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } case "timestamp_string": if err := dec.Decode(&s.TimestampString); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampString", err) } } diff --git a/typedapi/types/overallbucketjob.go b/typedapi/types/overallbucketjob.go index d506b48bf6..cf498cc853 100644 --- a/typedapi/types/overallbucketjob.go +++ b/typedapi/types/overallbucketjob.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // OverallBucketJob type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Bucket.ts#L146-L149 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Bucket.ts#L146-L149 type OverallBucketJob struct { JobId string `json:"job_id"` MaxAnomalyScore Float64 `json:"max_anomaly_score"` @@ -53,7 +54,7 @@ func (s *OverallBucketJob) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "max_anomaly_score": @@ -63,7 +64,7 @@ func (s *OverallBucketJob) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAnomalyScore", err) } f := Float64(value) s.MaxAnomalyScore = f diff --git a/typedapi/types/overlapping.go b/typedapi/types/overlapping.go index 9013f7cc5e..9806b7e956 100644 --- a/typedapi/types/overlapping.go +++ b/typedapi/types/overlapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Overlapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L39-L42 type Overlapping struct { IndexPatterns []string `json:"index_patterns"` Name string `json:"name"` @@ -52,12 +53,12 @@ func (s *Overlapping) UnmarshalJSON(data []byte) error { case "index_patterns": if err := dec.Decode(&s.IndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/page.go b/typedapi/types/page.go index 18a0edf1e3..3248a0ebb3 100644 --- a/typedapi/types/page.go +++ b/typedapi/types/page.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Page type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Page.ts#L22-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Page.ts#L22-L33 type Page struct { // From Skips the specified number of items. From *int `json:"from,omitempty"` @@ -61,7 +62,7 @@ func (s *Page) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -77,7 +78,7 @@ func (s *Page) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/pagerdutyaction.go b/typedapi/types/pagerdutyaction.go index 47a2fa61bf..c27fc63769 100644 --- a/typedapi/types/pagerdutyaction.go +++ b/typedapi/types/pagerdutyaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PagerDutyAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L54-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L54-L54 type PagerDutyAction struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` @@ -63,7 +64,7 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case "account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Account", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AttachPayload", err) } s.AttachPayload = value case bool: @@ -89,7 +90,7 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case "client": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Client", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -101,7 +102,7 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case "client_url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClientUrl", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,13 +113,13 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case "contexts", "context": if err := dec.Decode(&s.Contexts); err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,13 +130,13 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case "event_type": if err := dec.Decode(&s.EventType); err != nil { - return err + return fmt.Errorf("%s | %w", "EventType", err) } case "incident_key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IncidentKey", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -146,7 +147,7 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { case "proxy": if err := dec.Decode(&s.Proxy); err != nil { - return err + return fmt.Errorf("%s | %w", "Proxy", err) } } diff --git a/typedapi/types/pagerdutycontext.go b/typedapi/types/pagerdutycontext.go index a379e3e99d..db173b3afc 100644 --- a/typedapi/types/pagerdutycontext.go +++ b/typedapi/types/pagerdutycontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PagerDutyContext type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L61-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L61-L65 type PagerDutyContext struct { Href *string `json:"href,omitempty"` Src *string `json:"src,omitempty"` @@ -57,7 +58,7 @@ func (s *PagerDutyContext) UnmarshalJSON(data []byte) error { case "href": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Href", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,7 +70,7 @@ func (s *PagerDutyContext) UnmarshalJSON(data []byte) error { case "src": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Src", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,7 +81,7 @@ func (s *PagerDutyContext) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/pagerdutyevent.go b/typedapi/types/pagerdutyevent.go index dae5a1a217..29d6af152e 100644 --- a/typedapi/types/pagerdutyevent.go +++ b/typedapi/types/pagerdutyevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PagerDutyEvent type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L40-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L40-L52 type PagerDutyEvent struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` @@ -63,7 +64,7 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case "account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Account", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AttachPayload", err) } s.AttachPayload = value case bool: @@ -89,7 +90,7 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case "client": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Client", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -101,7 +102,7 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case "client_url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClientUrl", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,13 +113,13 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case "contexts", "context": if err := dec.Decode(&s.Contexts); err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,13 +130,13 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case "event_type": if err := dec.Decode(&s.EventType); err != nil { - return err + return fmt.Errorf("%s | %w", "EventType", err) } case "incident_key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IncidentKey", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -146,7 +147,7 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { case "proxy": if err := dec.Decode(&s.Proxy); err != nil { - return err + return fmt.Errorf("%s | %w", "Proxy", err) } } diff --git a/typedapi/types/pagerdutyeventproxy.go b/typedapi/types/pagerdutyeventproxy.go index 8c891419ce..5000cfe41d 100644 --- a/typedapi/types/pagerdutyeventproxy.go +++ b/typedapi/types/pagerdutyeventproxy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PagerDutyEventProxy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L56-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L56-L59 type PagerDutyEventProxy struct { Host *string `json:"host,omitempty"` Port *int `json:"port,omitempty"` @@ -53,7 +54,7 @@ func (s *PagerDutyEventProxy) UnmarshalJSON(data []byte) error { case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "port": @@ -64,7 +65,7 @@ func (s *PagerDutyEventProxy) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } s.Port = &value case float64: diff --git a/typedapi/types/pagerdutyresult.go b/typedapi/types/pagerdutyresult.go index 90d16bc57c..a110d5c8c4 100644 --- a/typedapi/types/pagerdutyresult.go +++ b/typedapi/types/pagerdutyresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PagerDutyResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L78-L83 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L78-L83 type PagerDutyResult struct { Event PagerDutyEvent `json:"event"` Reason *string `json:"reason,omitempty"` @@ -55,13 +56,13 @@ func (s *PagerDutyResult) UnmarshalJSON(data []byte) error { case "event": if err := dec.Decode(&s.Event); err != nil { - return err + return fmt.Errorf("%s | %w", "Event", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,12 +73,12 @@ func (s *PagerDutyResult) UnmarshalJSON(data []byte) error { case "request": if err := dec.Decode(&s.Request); err != nil { - return err + return fmt.Errorf("%s | %w", "Request", err) } case "response": if err := dec.Decode(&s.Response); err != nil { - return err + return fmt.Errorf("%s | %w", "Response", err) } } diff --git a/typedapi/types/painlesscontextsetup.go b/typedapi/types/painlesscontextsetup.go index 2852430106..1b1d6ca5ab 100644 --- a/typedapi/types/painlesscontextsetup.go +++ b/typedapi/types/painlesscontextsetup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PainlessContextSetup type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/scripts_painless_execute/types.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/scripts_painless_execute/types.ts#L25-L39 type PainlessContextSetup struct { // Document Document that’s temporarily indexed in-memory and accessible from the script. Document json.RawMessage `json:"document,omitempty"` @@ -58,17 +59,17 @@ func (s *PainlessContextSetup) UnmarshalJSON(data []byte) error { case "document": if err := dec.Decode(&s.Document); err != nil { - return err + return fmt.Errorf("%s | %w", "Document", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } } diff --git a/typedapi/types/parentaggregate.go b/typedapi/types/parentaggregate.go index 8237896655..7a0be6120d 100644 --- a/typedapi/types/parentaggregate.go +++ b/typedapi/types/parentaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // ParentAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L779-L780 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L779-L780 type ParentAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := 
NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := 
NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/parentaggregation.go b/typedapi/types/parentaggregation.go index 49617b5d2c..5e1199d2af 100644 --- a/typedapi/types/parentaggregation.go +++ b/typedapi/types/parentaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ParentAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L643-L648 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L643-L648 type ParentAggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -55,13 +56,13 @@ func (s *ParentAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *ParentAggregation) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/parentidquery.go b/typedapi/types/parentidquery.go index 45a9680ebc..2bfbe9b834 100644 --- a/typedapi/types/parentidquery.go +++ b/typedapi/types/parentidquery.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ParentIdQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/joining.ts#L132-L146 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/joining.ts#L132-L146 type ParentIdQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -70,7 +71,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -81,7 +82,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "ignore_unmapped": @@ -91,7 +92,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -101,7 +102,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,7 +113,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/parenttaskinfo.go b/typedapi/types/parenttaskinfo.go index d585e6588b..fd685f2c17 100644 --- a/typedapi/types/parenttaskinfo.go +++ b/typedapi/types/parenttaskinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ParentTaskInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/_types/TaskListResponseBase.ts#L45-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/_types/TaskListResponseBase.ts#L45-L47 type ParentTaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -67,7 +68,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { case "action": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Cancellable", err) } s.Cancellable = value case bool: @@ -97,7 +98,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Cancelled", err) } s.Cancelled = &value case bool: @@ -106,13 +107,13 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { case "children": if err := dec.Decode(&s.Children); err != nil { - return err + return fmt.Errorf("%s | %w", "Children", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { s.Headers = make(map[string]string, 0) } if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "id": @@ -136,7 +137,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } s.Id = value case float64: @@ -146,38 +147,38 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "parent_task_id": if err := dec.Decode(&s.ParentTaskId); err != nil { - return err + return fmt.Errorf("%s | %w", "ParentTaskId", err) } case "running_time": if err := dec.Decode(&s.RunningTime); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningTime", err) } case "running_time_in_nanos": if err := dec.Decode(&s.RunningTimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningTimeInNanos", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/passthroughinferenceoptions.go b/typedapi/types/passthroughinferenceoptions.go index d2b3886bc3..1fbe6d7dd2 100644 --- a/typedapi/types/passthroughinferenceoptions.go +++ b/typedapi/types/passthroughinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PassThroughInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L224-L231 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L224-L231 type PassThroughInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. @@ -58,7 +59,7 @@ func (s *PassThroughInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,12 +70,12 @@ func (s *PassThroughInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } case "vocabulary": if err := dec.Decode(&s.Vocabulary); err != nil { - return err + return fmt.Errorf("%s | %w", "Vocabulary", err) } } diff --git a/typedapi/types/passthroughinferenceupdateoptions.go b/typedapi/types/passthroughinferenceupdateoptions.go index b3af03c28d..17add28c7a 100644 --- a/typedapi/types/passthroughinferenceupdateoptions.go +++ b/typedapi/types/passthroughinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PassThroughInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L385-L390 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L385-L390 type PassThroughInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. @@ -57,7 +58,7 @@ func (s *PassThroughInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *PassThroughInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/pathhierarchytokenizer.go b/typedapi/types/pathhierarchytokenizer.go index f52f1d40e8..5e6f465d2b 100644 --- a/typedapi/types/pathhierarchytokenizer.go +++ b/typedapi/types/pathhierarchytokenizer.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PathHierarchyTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L89-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L89-L96 type PathHierarchyTokenizer struct { BufferSize Stringifiedinteger `json:"buffer_size,omitempty"` Delimiter *string `json:"delimiter,omitempty"` @@ -58,13 +59,13 @@ func (s *PathHierarchyTokenizer) UnmarshalJSON(data []byte) error { case "buffer_size": if err := dec.Decode(&s.BufferSize); err != nil { - return err + return fmt.Errorf("%s | %w", "BufferSize", err) } case "delimiter": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Delimiter", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *PathHierarchyTokenizer) UnmarshalJSON(data []byte) error { case "replacement": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Replacement", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,22 +88,22 @@ func (s *PathHierarchyTokenizer) UnmarshalJSON(data []byte) error { case "reverse": if err := dec.Decode(&s.Reverse); err != nil { - return err + return fmt.Errorf("%s | %w", "Reverse", err) } case "skip": if err := dec.Decode(&s.Skip); err != nil { - return err + return fmt.Errorf("%s | %w", "Skip", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/patternanalyzer.go b/typedapi/types/patternanalyzer.go index e3b586203a..b7464e5fb9 100644 --- a/typedapi/types/patternanalyzer.go +++ b/typedapi/types/patternanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PatternAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L74-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L74-L81 type PatternAnalyzer struct { Flags *string `json:"flags,omitempty"` Lowercase *bool `json:"lowercase,omitempty"` @@ -58,7 +59,7 @@ func (s *PatternAnalyzer) UnmarshalJSON(data []byte) error { case "flags": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *PatternAnalyzer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lowercase", err) } s.Lowercase = &value case bool: @@ -84,7 +85,7 @@ func (s *PatternAnalyzer) UnmarshalJSON(data []byte) error { case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,24 +100,24 @@ func (s *PatternAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/patterncapturetokenfilter.go b/typedapi/types/patterncapturetokenfilter.go index ec4616098b..98a11d5986 100644 --- a/typedapi/types/patterncapturetokenfilter.go +++ b/typedapi/types/patterncapturetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PatternCaptureTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L278-L282 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L278-L282 type PatternCaptureTokenFilter struct { Patterns []string `json:"patterns"` PreserveOriginal Stringifiedboolean `json:"preserve_original,omitempty"` @@ -54,22 +55,22 @@ func (s *PatternCaptureTokenFilter) UnmarshalJSON(data []byte) error { case "patterns": if err := dec.Decode(&s.Patterns); err != nil { - return err + return fmt.Errorf("%s | %w", "Patterns", err) } case "preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/patternreplacecharfilter.go b/typedapi/types/patternreplacecharfilter.go index 8ed8724928..fd454050ee 100644 --- a/typedapi/types/patternreplacecharfilter.go +++ b/typedapi/types/patternreplacecharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PatternReplaceCharFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/char_filters.ts#L53-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/char_filters.ts#L53-L58 type PatternReplaceCharFilter struct { Flags *string `json:"flags,omitempty"` Pattern string `json:"pattern"` @@ -57,7 +58,7 @@ func (s *PatternReplaceCharFilter) UnmarshalJSON(data []byte) error { case "flags": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,7 +70,7 @@ func (s *PatternReplaceCharFilter) UnmarshalJSON(data []byte) error { case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *PatternReplaceCharFilter) UnmarshalJSON(data []byte) error { case "replacement": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Replacement", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,12 +93,12 @@ func (s *PatternReplaceCharFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/patternreplacetokenfilter.go b/typedapi/types/patternreplacetokenfilter.go index b78e2d4591..19948fd6d8 100644 --- a/typedapi/types/patternreplacetokenfilter.go +++ b/typedapi/types/patternreplacetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PatternReplaceTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L284-L290 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L284-L290 type PatternReplaceTokenFilter struct { All *bool `json:"all,omitempty"` Flags *string `json:"flags,omitempty"` @@ -62,7 +63,7 @@ func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "All", err) } s.All = &value case bool: @@ -72,7 +73,7 @@ func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { case "flags": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +97,7 @@ func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { case "replacement": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Replacement", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,12 +108,12 @@ func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/patterntokenizer.go b/typedapi/types/patterntokenizer.go index 0d33bf8649..3156b5620e 100644 --- a/typedapi/types/patterntokenizer.go +++ b/typedapi/types/patterntokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PatternTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L98-L103 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L98-L103 type PatternTokenizer struct { Flags *string `json:"flags,omitempty"` Group *int `json:"group,omitempty"` @@ -57,7 +58,7 @@ func (s *PatternTokenizer) UnmarshalJSON(data []byte) error { case "flags": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *PatternTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Group", err) } s.Group = &value case float64: @@ -85,7 +86,7 @@ func (s *PatternTokenizer) UnmarshalJSON(data []byte) error { case "pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,12 +97,12 @@ func (s *PatternTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/pendingtask.go b/typedapi/types/pendingtask.go index c961db8e79..3cf1e572ca 100644 --- a/typedapi/types/pendingtask.go +++ b/typedapi/types/pendingtask.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PendingTask type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/pending_tasks/types.ts#L23-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/pending_tasks/types.ts#L23-L47 type PendingTask struct { // Executing Indicates whether the pending tasks are currently executing or not. 
Executing bool `json:"executing"` @@ -73,7 +74,7 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Executing", err) } s.Executing = value case bool: @@ -88,7 +89,7 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InsertOrder", err) } s.InsertOrder = value case float64: @@ -99,7 +100,7 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { case "priority": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -111,7 +112,7 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -122,12 +123,12 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { case "time_in_queue": if err := dec.Decode(&s.TimeInQueue); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInQueue", err) } case "time_in_queue_millis": if err := dec.Decode(&s.TimeInQueueMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInQueueMillis", err) } } diff --git a/typedapi/types/pendingtasksrecord.go b/typedapi/types/pendingtasksrecord.go index cee1d6d768..3a8daf2a07 100644 --- a/typedapi/types/pendingtasksrecord.go +++ b/typedapi/types/pendingtasksrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PendingTasksRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/pending_tasks/types.ts#L20-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/pending_tasks/types.ts#L20-L41 type PendingTasksRecord struct { // InsertOrder The task insertion order. 
InsertOrder *string `json:"insertOrder,omitempty"` @@ -60,7 +61,7 @@ func (s *PendingTasksRecord) UnmarshalJSON(data []byte) error { case "insertOrder", "o": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "InsertOrder", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *PendingTasksRecord) UnmarshalJSON(data []byte) error { case "priority", "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *PendingTasksRecord) UnmarshalJSON(data []byte) error { case "source", "s": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +97,7 @@ func (s *PendingTasksRecord) UnmarshalJSON(data []byte) error { case "timeInQueue", "t": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInQueue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/percentage.go b/typedapi/types/percentage.go index de8cd4bdf9..a5e32474ca 100644 --- a/typedapi/types/percentage.go +++ b/typedapi/types/percentage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // float32 // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Numeric.ts#L28-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Numeric.ts#L28-L28 type Percentage interface{} diff --git a/typedapi/types/percentagescoreheuristic.go b/typedapi/types/percentagescoreheuristic.go index 8dfc6059ad..1fd676e7ec 100644 --- a/typedapi/types/percentagescoreheuristic.go +++ b/typedapi/types/percentagescoreheuristic.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // PercentageScoreHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L764-L764 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L764-L764 type PercentageScoreHeuristic struct { } diff --git a/typedapi/types/percentileranksaggregation.go b/typedapi/types/percentileranksaggregation.go index af9391e7d9..b82da48baf 100644 --- a/typedapi/types/percentileranksaggregation.go +++ b/typedapi/types/percentileranksaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PercentileRanksAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L174-L193 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L174-L193 type PercentileRanksAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -70,13 +71,13 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,7 +88,7 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { case "hdr": if err := dec.Decode(&s.Hdr); err != nil { - return err + return fmt.Errorf("%s | %w", "Hdr", err) } case "keyed": @@ -97,7 +98,7 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -106,13 +107,13 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -121,7 +122,7 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -130,7 +131,7 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -138,7 +139,7 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -147,12 +148,12 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { case "tdigest": if err := dec.Decode(&s.Tdigest); err != nil { - return err + return fmt.Errorf("%s | %w", "Tdigest", err) } case "values": if err := dec.Decode(&s.Values); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } } diff --git a/typedapi/types/percentiles.go b/typedapi/types/percentiles.go index 90d158c237..d12f0bbf01 100644 --- a/typedapi/types/percentiles.go +++ b/typedapi/types/percentiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // KeyedPercentiles // []ArrayPercentilesItem // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L150-L151 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L150-L151 type Percentiles interface{} diff --git a/typedapi/types/percentilesaggregation.go b/typedapi/types/percentilesaggregation.go index 0b63f71d36..36198e78b6 100644 --- a/typedapi/types/percentilesaggregation.go +++ b/typedapi/types/percentilesaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PercentilesAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L195-L214 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L195-L214 type PercentilesAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -70,13 +71,13 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,7 +88,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { case "hdr": if err := dec.Decode(&s.Hdr); err != nil { - return err + return fmt.Errorf("%s | %w", "Hdr", err) } case "keyed": @@ -97,7 +98,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -106,18 +107,18 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "percents": if err := dec.Decode(&s.Percents); err != nil { - return err + return fmt.Errorf("%s | %w", "Percents", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -126,7 +127,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -135,7 +136,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := 
localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -143,7 +144,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -152,7 +153,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { case "tdigest": if err := dec.Decode(&s.Tdigest); err != nil { - return err + return fmt.Errorf("%s | %w", "Tdigest", err) } } diff --git a/typedapi/types/percentilesbucketaggregate.go b/typedapi/types/percentilesbucketaggregate.go index eb74c97151..03a13e46e9 100644 --- a/typedapi/types/percentilesbucketaggregate.go +++ b/typedapi/types/percentilesbucketaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PercentilesBucketAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L178-L179 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L178-L179 type PercentilesBucketAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` @@ -52,7 +53,7 @@ func (s *PercentilesBucketAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "values": @@ -65,13 +66,13 @@ func (s *PercentilesBucketAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(KeyedPercentiles, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o case '[': o := []ArrayPercentilesItem{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o } diff --git a/typedapi/types/percentilesbucketaggregation.go b/typedapi/types/percentilesbucketaggregation.go index 5fc49a1c81..bf24e0bf26 100644 --- a/typedapi/types/percentilesbucketaggregation.go +++ b/typedapi/types/percentilesbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PercentilesBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L354-L359 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L354-L359 type PercentilesBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -65,13 +66,13 @@ func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,18 +83,18 @@ func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { case "percents": if err := dec.Decode(&s.Percents); err != nil { - return err + return fmt.Errorf("%s | %w", "Percents", err) } } diff --git a/typedapi/types/percolatequery.go b/typedapi/types/percolatequery.go index 44f67684f7..36fc923713 100644 --- a/typedapi/types/percolatequery.go +++ b/typedapi/types/percolatequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PercolateQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L193-L230 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L193-L230 type PercolateQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -83,7 +84,7 @@ func (s *PercolateQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,33 +95,33 @@ func (s *PercolateQuery) UnmarshalJSON(data []byte) error { case "document": if err := dec.Decode(&s.Document); err != nil { - return err + return fmt.Errorf("%s | %w", "Document", err) } case "documents": if err := dec.Decode(&s.Documents); err != nil { - return err + return fmt.Errorf("%s | %w", "Documents", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,7 +133,7 @@ func (s *PercolateQuery) UnmarshalJSON(data []byte) error { case "preference": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Preference", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -144,7 +145,7 @@ func (s *PercolateQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -155,12 +156,12 @@ func (s *PercolateQuery) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/percolatorproperty.go b/typedapi/types/percolatorproperty.go index 1f25bf0f19..785cb7eda6 100644 --- a/typedapi/types/percolatorproperty.go +++ b/typedapi/types/percolatorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PercolatorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L180-L182 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L180-L182 type PercolatorProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -60,7 +61,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -378,7 +379,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -391,7 +392,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -703,7 +704,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/perpartitioncategorization.go b/typedapi/types/perpartitioncategorization.go index a872e8d1a0..6a3fee6799 100644 --- a/typedapi/types/perpartitioncategorization.go +++ b/typedapi/types/perpartitioncategorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PerPartitionCategorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Analysis.ts#L150-L159 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Analysis.ts#L150-L159 type PerPartitionCategorization struct { // Enabled To enable this setting, you must also set the `partition_field_name` property // to the same value in every detector that uses the keyword `mlcategory`. @@ -67,7 +68,7 @@ func (s *PerPartitionCategorization) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -81,7 +82,7 @@ func (s *PerPartitionCategorization) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StopOnWarn", err) } s.StopOnWarn = &value case bool: diff --git a/typedapi/types/persistenttaskstatus.go b/typedapi/types/persistenttaskstatus.go index 7b73c2d0d9..014781df0f 100644 --- a/typedapi/types/persistenttaskstatus.go +++ b/typedapi/types/persistenttaskstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // PersistentTaskStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L56-L58 type PersistentTaskStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/phase.go b/typedapi/types/phase.go index d72e93e1e1..314d13e6eb 100644 --- a/typedapi/types/phase.go +++ b/typedapi/types/phase.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Phase type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/_types/Phase.ts#L25-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/_types/Phase.ts#L25-L36 type Phase struct { Actions json.RawMessage `json:"actions,omitempty"` Configurations *Configurations `json:"configurations,omitempty"` @@ -53,17 +54,17 @@ func (s *Phase) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "configurations": if err := dec.Decode(&s.Configurations); err != nil { - return err + return fmt.Errorf("%s | %w", "Configurations", err) } case "min_age": if err := dec.Decode(&s.MinAge); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAge", err) } } diff --git a/typedapi/types/phases.go b/typedapi/types/phases.go index 87549c4057..c0f8e0bcfc 100644 --- a/typedapi/types/phases.go +++ b/typedapi/types/phases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Phases type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/_types/Phase.ts#L38-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/_types/Phase.ts#L38-L44 type Phases struct { Cold *Phase `json:"cold,omitempty"` Delete *Phase `json:"delete,omitempty"` diff --git a/typedapi/types/phonetictokenfilter.go b/typedapi/types/phonetictokenfilter.go index 64bf9133c2..275a22f381 100644 --- a/typedapi/types/phonetictokenfilter.go +++ b/typedapi/types/phonetictokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -35,7 +36,7 @@ import ( // PhoneticTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/phonetic-plugin.ts#L64-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/phonetic-plugin.ts#L64-L72 type PhoneticTokenFilter struct { Encoder phoneticencoder.PhoneticEncoder `json:"encoder"` Languageset []phoneticlanguage.PhoneticLanguage `json:"languageset"` @@ -64,12 +65,12 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { case "encoder": if err := dec.Decode(&s.Encoder); err != nil { - return err + return fmt.Errorf("%s | %w", "Encoder", err) } case "languageset": if err := dec.Decode(&s.Languageset); err != nil { - return err + return fmt.Errorf("%s | %w", "Languageset", err) } case "max_code_len": @@ -80,7 +81,7 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxCodeLen", err) } s.MaxCodeLen = &value case float64: @@ -90,7 +91,7 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { case "name_type": if err := dec.Decode(&s.NameType); err != nil { - return err + return fmt.Errorf("%s | %w", "NameType", err) } case "replace": @@ -100,7 +101,7 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Replace", err) } s.Replace = &value case bool: @@ -109,17 +110,17 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { case "rule_type": if err := dec.Decode(&s.RuleType); err != nil { - return err + return fmt.Errorf("%s | %w", "RuleType", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/phrasesuggest.go b/typedapi/types/phrasesuggest.go index b582b22ad1..a49f36c26b 100644 --- a/typedapi/types/phrasesuggest.go +++ b/typedapi/types/phrasesuggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PhraseSuggest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L57-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L57-L62 type PhraseSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -61,7 +62,7 @@ func (s *PhraseSuggest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = value case float64: @@ -77,7 +78,7 @@ func (s *PhraseSuggest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } s.Offset = value case float64: @@ -91,20 +92,20 @@ func (s *PhraseSuggest) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewPhraseSuggestOption() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } s.Options = append(s.Options, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/phrasesuggestcollate.go b/typedapi/types/phrasesuggestcollate.go index 91c13683ab..0b01ed63fa 100644 --- a/typedapi/types/phrasesuggestcollate.go +++ b/typedapi/types/phrasesuggestcollate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PhraseSuggestCollate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L330-L343 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L330-L343 type PhraseSuggestCollate struct { // Params Parameters to use if the query is templated. 
Params map[string]json.RawMessage `json:"params,omitempty"` @@ -61,7 +62,7 @@ func (s *PhraseSuggestCollate) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "prune": @@ -71,7 +72,7 @@ func (s *PhraseSuggestCollate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Prune", err) } s.Prune = &value case bool: @@ -80,7 +81,7 @@ func (s *PhraseSuggestCollate) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } } diff --git a/typedapi/types/phrasesuggestcollatequery.go b/typedapi/types/phrasesuggestcollatequery.go index 7b5912c0d1..627bd077ee 100644 --- a/typedapi/types/phrasesuggestcollatequery.go +++ b/typedapi/types/phrasesuggestcollatequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PhraseSuggestCollateQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L345-L354 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L345-L354 type PhraseSuggestCollateQuery struct { // Id The search template ID. Id *string `json:"id,omitempty"` @@ -55,13 +56,13 @@ func (s *PhraseSuggestCollateQuery) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/phrasesuggester.go b/typedapi/types/phrasesuggester.go index 4fa9b40129..59c5c4b614 100644 --- a/typedapi/types/phrasesuggester.go +++ b/typedapi/types/phrasesuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PhraseSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L356-L414 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L356-L414 type PhraseSuggester struct { // Analyzer The analyzer to analyze the suggest text with. // Defaults to the search analyzer of the suggest field. 
@@ -103,7 +104,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -114,7 +115,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "collate": if err := dec.Decode(&s.Collate); err != nil { - return err + return fmt.Errorf("%s | %w", "Collate", err) } case "confidence": @@ -124,7 +125,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Confidence", err) } f := Float64(value) s.Confidence = &f @@ -135,12 +136,12 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "direct_generator": if err := dec.Decode(&s.DirectGenerator); err != nil { - return err + return fmt.Errorf("%s | %w", "DirectGenerator", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "force_unigrams": @@ -150,7 +151,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ForceUnigrams", err) } s.ForceUnigrams = &value case bool: @@ -165,7 +166,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "GramSize", err) } s.GramSize = &value case float64: @@ -175,7 +176,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "max_errors": @@ -185,7 +186,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxErrors", err) } f := Float64(value) s.MaxErrors = &f @@ -201,7 +202,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RealWordErrorLikelihood", err) } f := Float64(value) s.RealWordErrorLikelihood = &f @@ -213,7 +214,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -230,7 +231,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -246,7 +247,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -256,13 +257,13 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "smoothing": if err := dec.Decode(&s.Smoothing); err != nil { - return err + return fmt.Errorf("%s | %w", "Smoothing", err) } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -279,7 +280,7 @@ func (s 
*PhraseSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TokenLimit", err) } s.TokenLimit = &value case float64: diff --git a/typedapi/types/phrasesuggesthighlight.go b/typedapi/types/phrasesuggesthighlight.go index e306b6e3d4..1015277107 100644 --- a/typedapi/types/phrasesuggesthighlight.go +++ b/typedapi/types/phrasesuggesthighlight.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PhraseSuggestHighlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L416-L425 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L416-L425 type PhraseSuggestHighlight struct { // PostTag Use in conjunction with `pre_tag` to define the HTML tags to use for the // highlighted text. @@ -58,7 +59,7 @@ func (s *PhraseSuggestHighlight) UnmarshalJSON(data []byte) error { case "post_tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PostTag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *PhraseSuggestHighlight) UnmarshalJSON(data []byte) error { case "pre_tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PreTag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/phrasesuggestoption.go b/typedapi/types/phrasesuggestoption.go index 953b2ba61e..511ba0d2af 100644 --- a/typedapi/types/phrasesuggestoption.go +++ b/typedapi/types/phrasesuggestoption.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PhraseSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L86-L91 type PhraseSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Highlighted *string `json:"highlighted,omitempty"` @@ -60,7 +61,7 @@ func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollateMatch", err) } s.CollateMatch = &value case bool: @@ -70,7 +71,7 @@ func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { case "highlighted": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlighted", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } f := Float64(value) s.Score = f @@ -98,7 +99,7 @@ func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/pinneddoc.go b/typedapi/types/pinneddoc.go index afd85900f4..0a58188d34 100644 --- a/typedapi/types/pinneddoc.go +++ b/typedapi/types/pinneddoc.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PinnedDoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L253-L262 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L253-L262 type PinnedDoc struct { // Id_ The unique document ID. Id_ string `json:"_id"` @@ -54,12 +55,12 @@ func (s *PinnedDoc) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } } diff --git a/typedapi/types/pinnedquery.go b/typedapi/types/pinnedquery.go index 68dc4ee946..424f209e9a 100644 --- a/typedapi/types/pinnedquery.go +++ b/typedapi/types/pinnedquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PinnedQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L232-L251 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L232-L251 type PinnedQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -72,7 +73,7 @@ func (s *PinnedQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -83,23 +84,23 @@ func (s *PinnedQuery) UnmarshalJSON(data []byte) error { case "docs": if err := dec.Decode(&s.Docs); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } case "ids": if err := dec.Decode(&s.Ids); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } case "organic": if err := dec.Decode(&s.Organic); err != nil { - return err + return fmt.Errorf("%s | %w", "Organic", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/pipelineconfig.go b/typedapi/types/pipelineconfig.go index f45ab8d20d..fc9cf837e8 100644 --- a/typedapi/types/pipelineconfig.go +++ b/typedapi/types/pipelineconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PipelineConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Pipeline.ts#L61-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Pipeline.ts#L61-L75 type PipelineConfig struct { // Description Description of the ingest pipeline. Description *string `json:"description,omitempty"` @@ -59,7 +60,7 @@ func (s *PipelineConfig) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,12 +71,12 @@ func (s *PipelineConfig) UnmarshalJSON(data []byte) error { case "processors": if err := dec.Decode(&s.Processors); err != nil { - return err + return fmt.Errorf("%s | %w", "Processors", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/pipelinemetadata.go b/typedapi/types/pipelinemetadata.go index 332a3dc2c8..bf2f07b203 100644 --- a/typedapi/types/pipelinemetadata.go +++ b/typedapi/types/pipelinemetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PipelineMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/logstash/_types/Pipeline.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/logstash/_types/Pipeline.ts#L23-L26 type PipelineMetadata struct { Type string `json:"type"` Version string `json:"version"` @@ -54,7 +55,7 @@ func (s *PipelineMetadata) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *PipelineMetadata) UnmarshalJSON(data []byte) error { case "version": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/pipelineprocessor.go b/typedapi/types/pipelineprocessor.go index 98674b6880..05edc22e96 100644 --- a/typedapi/types/pipelineprocessor.go +++ b/typedapi/types/pipelineprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PipelineProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L928-L939 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L928-L939 type PipelineProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -69,7 +70,7 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,7 +98,7 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -111,7 +112,7 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissingPipeline", err) } s.IgnoreMissingPipeline = &value case bool: @@ -120,18 +121,18 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/pipelinesettings.go b/typedapi/types/pipelinesettings.go index 3ba8e17857..fdc2553dac 100644 --- a/typedapi/types/pipelinesettings.go +++ b/typedapi/types/pipelinesettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PipelineSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/logstash/_types/Pipeline.ts#L28-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/logstash/_types/Pipeline.ts#L28-L59 type PipelineSettings struct { // PipelineBatchDelay When creating pipeline event batches, how long in milliseconds to wait for // each event before dispatching an undersized batch to pipeline workers. 
@@ -76,7 +77,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PipelineBatchDelay", err) } s.PipelineBatchDelay = value case float64: @@ -92,7 +93,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PipelineBatchSize", err) } s.PipelineBatchSize = value case float64: @@ -108,7 +109,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PipelineWorkers", err) } s.PipelineWorkers = value case float64: @@ -124,7 +125,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueCheckpointWrites", err) } s.QueueCheckpointWrites = value case float64: @@ -140,7 +141,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueMaxBytesNumber", err) } s.QueueMaxBytesNumber = value case float64: @@ -151,7 +152,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "queue.max_bytes.units": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueueMaxBytesUnits", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,7 +164,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "queue.type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueueType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/pipelinesimulation.go b/typedapi/types/pipelinesimulation.go index 8c78dca0a1..39c5590ee9 100644 --- a/typedapi/types/pipelinesimulation.go +++ b/typedapi/types/pipelinesimulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PipelineSimulation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/simulate/types.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/simulate/types.ts#L33-L39 type PipelineSimulation struct { Doc *DocumentSimulation `json:"doc,omitempty"` ProcessorResults []PipelineSimulation `json:"processor_results,omitempty"` @@ -58,18 +59,18 @@ func (s *PipelineSimulation) UnmarshalJSON(data []byte) error { case "doc": if err := dec.Decode(&s.Doc); err != nil { - return err + return fmt.Errorf("%s | %w", "Doc", err) } case "processor_results": if err := dec.Decode(&s.ProcessorResults); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessorResults", err) } case "processor_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessorType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,13 +81,13 @@ func (s *PipelineSimulation) UnmarshalJSON(data []byte) error { case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/pipeseparatedflagssimplequerystringflag.go b/typedapi/types/pipeseparatedflagssimplequerystringflag.go index a024f55e78..7a74f176a3 100644 --- a/typedapi/types/pipeseparatedflagssimplequerystringflag.go +++ b/typedapi/types/pipeseparatedflagssimplequerystringflag.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // simplequerystringflag.SimpleQueryStringFlag // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/PipeSeparatedFlags.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/PipeSeparatedFlags.ts#L20-L27 type PipeSeparatedFlagsSimpleQueryStringFlag interface{} diff --git a/typedapi/types/pivot.go b/typedapi/types/pivot.go index 90045663f0..b0d3611e74 100644 --- a/typedapi/types/pivot.go +++ b/typedapi/types/pivot.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Pivot type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L54-L68 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L54-L68 type Pivot struct { // Aggregations Defines how to aggregate the grouped data. 
The following aggregations are // currently supported: average, bucket diff --git a/typedapi/types/pivotgroupbycontainer.go b/typedapi/types/pivotgroupbycontainer.go index 0737423947..0188ce7e02 100644 --- a/typedapi/types/pivotgroupbycontainer.go +++ b/typedapi/types/pivotgroupbycontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // PivotGroupByContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L70-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L70-L78 type PivotGroupByContainer struct { DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` diff --git a/typedapi/types/pluginsrecord.go b/typedapi/types/pluginsrecord.go index 2c8eeaca18..75835dbd86 100644 --- a/typedapi/types/pluginsrecord.go +++ b/typedapi/types/pluginsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PluginsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/plugins/types.ts#L22-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/plugins/types.ts#L22-L52 type PluginsRecord struct { // Component The component name. 
Component *string `json:"component,omitempty"` @@ -64,7 +65,7 @@ func (s *PluginsRecord) UnmarshalJSON(data []byte) error { case "component", "c": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Component", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *PluginsRecord) UnmarshalJSON(data []byte) error { case "description", "d": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,18 +88,18 @@ func (s *PluginsRecord) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "name", "n": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type", "t": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *PluginsRecord) UnmarshalJSON(data []byte) error { case "version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/pluginsstatus.go b/typedapi/types/pluginsstatus.go index e2290462b9..fa5909631c 100644 --- a/typedapi/types/pluginsstatus.go +++ b/typedapi/types/pluginsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // PluginsStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L60-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L60-L62 type PluginsStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/pluginstats.go b/typedapi/types/pluginstats.go index 3aab994b91..b178d8333a 100644 --- a/typedapi/types/pluginstats.go +++ b/typedapi/types/pluginstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PluginStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L180-L190 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L180-L190 type PluginStats struct { Classname string `json:"classname"` Description string `json:"description"` @@ -61,7 +62,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case "classname": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Classname", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,12 +85,12 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case "elasticsearch_version": if err := dec.Decode(&s.ElasticsearchVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "ElasticsearchVersion", err) } case "extended_plugins": if err := dec.Decode(&s.ExtendedPlugins); err != nil { - return err + return fmt.Errorf("%s | %w", "ExtendedPlugins", err) } case "has_native_controller": @@ -99,7 +100,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HasNativeController", err) } s.HasNativeController = value case bool: @@ -108,7 +109,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case "java_version": if err := dec.Decode(&s.JavaVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "JavaVersion", err) } case "licensed": @@ -118,7 +119,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Licensed", err) } s.Licensed = value case bool: @@ -127,12 +128,12 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/pointintimereference.go b/typedapi/types/pointintimereference.go index 614ed130fe..0ce222e368 100644 --- a/typedapi/types/pointintimereference.go +++ b/typedapi/types/pointintimereference.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PointInTimeReference type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/PointInTimeReference.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/PointInTimeReference.ts#L23-L26 type PointInTimeReference struct { Id string `json:"id"` KeepAlive Duration `json:"keep_alive,omitempty"` @@ -52,12 +53,12 @@ func (s *PointInTimeReference) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "keep_alive": if err := dec.Decode(&s.KeepAlive); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepAlive", err) } } diff --git a/typedapi/types/pointproperty.go b/typedapi/types/pointproperty.go index 86b9536a4c..45a807ab4d 100644 --- a/typedapi/types/pointproperty.go +++ b/typedapi/types/pointproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // PointProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/geo.ts#L62-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/geo.ts#L66-L71 type PointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -71,13 +72,13 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -88,7 +89,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -97,7 +98,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -415,7 +416,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -430,7 +431,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -444,7 +445,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreZValue", err) } s.IgnoreZValue = &value case bool: @@ -456,13 +457,13 @@ func (s 
*PointProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -781,7 +782,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -797,7 +798,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -806,7 +807,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/pool.go b/typedapi/types/pool.go index 7926104331..063bd47402 100644 --- a/typedapi/types/pool.go +++ b/typedapi/types/pool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Pool type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L878-L895 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L878-L895 type Pool struct { // MaxInBytes Maximum amount of memory, in bytes, available for use by the heap. MaxInBytes *int64 `json:"max_in_bytes,omitempty"` @@ -64,7 +65,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxInBytes", err) } s.MaxInBytes = &value case float64: @@ -79,7 +80,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PeakMaxInBytes", err) } s.PeakMaxInBytes = &value case float64: @@ -94,7 +95,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PeakUsedInBytes", err) } s.PeakUsedInBytes = &value case float64: @@ -109,7 +110,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UsedInBytes", err) } s.UsedInBytes = &value case float64: diff --git a/typedapi/types/porterstemtokenfilter.go b/typedapi/types/porterstemtokenfilter.go index b36ac5f9ee..cc580b524f 100644 --- a/typedapi/types/porterstemtokenfilter.go +++ b/typedapi/types/porterstemtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PorterStemTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L292-L294 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L292-L294 type PorterStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *PorterStemTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/postmigrationfeature.go b/typedapi/types/postmigrationfeature.go index a0bcce57e9..66cdbc80b1 100644 --- a/typedapi/types/postmigrationfeature.go +++ b/typedapi/types/postmigrationfeature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PostMigrationFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L27-L29 type PostMigrationFeature struct { FeatureName string `json:"feature_name"` } @@ -53,7 +54,7 @@ func (s *PostMigrationFeature) UnmarshalJSON(data []byte) error { case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/predicatetokenfilter.go b/typedapi/types/predicatetokenfilter.go index 8fe1129d48..eacee1c829 100644 --- a/typedapi/types/predicatetokenfilter.go +++ b/typedapi/types/predicatetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // PredicateTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L296-L299 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L296-L299 type PredicateTokenFilter struct { Script Script `json:"script"` Type string `json:"type,omitempty"` @@ -54,7 +55,7 @@ func (s *PredicateTokenFilter) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -63,7 +64,7 @@ func (s *PredicateTokenFilter) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -72,7 +73,7 @@ func (s *PredicateTokenFilter) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -80,7 +81,7 @@ func (s *PredicateTokenFilter) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -89,12 +90,12 @@ func (s *PredicateTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/predictedvalue.go b/typedapi/types/predictedvalue.go index bcff396f3e..38871eef3e 100644 --- a/typedapi/types/predictedvalue.go +++ b/typedapi/types/predictedvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -27,5 +27,5 @@ package types // bool // int // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L457-L457 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L457-L457 type PredictedValue interface{} diff --git a/typedapi/types/prefixquery.go b/typedapi/types/prefixquery.go index c666823ab8..e31ae5b5b1 100644 --- a/typedapi/types/prefixquery.go +++ b/typedapi/types/prefixquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PrefixQuery type. 
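For union-typed fields such as `script` in `PredicateTokenFilter`, the generated decoder buffers the raw value and inspects its keys with a token decoder before choosing between an inline script and a stored-script reference, wrapping any failure with the same `"Script | ..."` prefix. A rough, self-contained sketch of that shape detection; the `InlineScript`/`StoredScriptId` stand-ins and the key set used here are simplified assumptions, not the client's definitions:

```go
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// Simplified stand-ins for the two shapes a "script" value can take.
type InlineScript struct {
	Source string `json:"source"`
}
type StoredScriptId struct {
	Id string `json:"id"`
}

// decodeScript peeks at the keys of the buffered raw value and decodes the
// whole message into whichever variant the first recognised key implies.
func decodeScript(message json.RawMessage) (interface{}, error) {
	keyDec := json.NewDecoder(bytes.NewReader(message))
	for {
		t, err := keyDec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return nil, fmt.Errorf("%s | %w", "Script", err)
		}
		switch t {
		case "source", "lang", "options", "params":
			o := &InlineScript{}
			if err := json.NewDecoder(bytes.NewReader(message)).Decode(o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Script", err)
			}
			return o, nil
		case "id":
			o := &StoredScriptId{}
			if err := json.NewDecoder(bytes.NewReader(message)).Decode(o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Script", err)
			}
			return o, nil
		}
	}
	return nil, errors.New("Script | unrecognised script shape")
}

func main() {
	v, err := decodeScript(json.RawMessage(`{"id":"my-stored-script"}`))
	fmt.Printf("%#v %v\n", v, err) // &main.StoredScriptId{Id:"my-stored-script"} <nil>
}
```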
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L87-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L87-L106 type PrefixQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -81,7 +82,7 @@ func (s *PrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -97,7 +98,7 @@ func (s *PrefixQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseInsensitive", err) } s.CaseInsensitive = &value case bool: @@ -107,7 +108,7 @@ func (s *PrefixQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,13 +119,13 @@ func (s *PrefixQuery) UnmarshalJSON(data []byte) error { case "rewrite": if err := dec.Decode(&s.Rewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "Rewrite", err) } case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/preprocessor.go b/typedapi/types/preprocessor.go index d5ead88f1b..73c521fc51 100644 --- a/typedapi/types/preprocessor.go +++ b/typedapi/types/preprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Preprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L31-L36 type Preprocessor struct { FrequencyEncoding *FrequencyEncodingPreprocessor `json:"frequency_encoding,omitempty"` OneHotEncoding *OneHotEncodingPreprocessor `json:"one_hot_encoding,omitempty"` diff --git a/typedapi/types/pressurememory.go b/typedapi/types/pressurememory.go index 452569113f..488120dae4 100644 --- a/typedapi/types/pressurememory.go +++ b/typedapi/types/pressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PressureMemory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L144-L199 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L144-L199 type PressureMemory struct { // All Memory consumed by indexing requests in the coordinating, primary, or replica // stage. @@ -84,7 +85,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case "all": if err := dec.Decode(&s.All); err != nil { - return err + return fmt.Errorf("%s | %w", "All", err) } case "all_in_bytes": @@ -94,7 +95,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllInBytes", err) } s.AllInBytes = &value case float64: @@ -104,7 +105,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case "combined_coordinating_and_primary": if err := dec.Decode(&s.CombinedCoordinatingAndPrimary); err != nil { - return err + return fmt.Errorf("%s | %w", "CombinedCoordinatingAndPrimary", err) } case "combined_coordinating_and_primary_in_bytes": @@ -114,7 +115,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CombinedCoordinatingAndPrimaryInBytes", err) } s.CombinedCoordinatingAndPrimaryInBytes = &value case float64: @@ -124,7 +125,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case "coordinating": if err := dec.Decode(&s.Coordinating); err != nil { - return err + return fmt.Errorf("%s | %w", "Coordinating", err) } case "coordinating_in_bytes": @@ -134,7 +135,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CoordinatingInBytes", err) } s.CoordinatingInBytes = &value case float64: @@ -149,7 +150,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CoordinatingRejections", err) } s.CoordinatingRejections = &value case float64: @@ -159,7 +160,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case "primary": if err := dec.Decode(&s.Primary); err != nil { - return err + return fmt.Errorf("%s | %w", "Primary", err) } case "primary_in_bytes": @@ -169,7 +170,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryInBytes", err) } s.PrimaryInBytes = &value case float64: @@ -184,7 +185,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryRejections", err) } s.PrimaryRejections = &value case float64: @@ -194,7 +195,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case "replica": if err := dec.Decode(&s.Replica); err != nil { - return err + return fmt.Errorf("%s | %w", "Replica", err) } case "replica_in_bytes": @@ -204,7 +205,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReplicaInBytes", err) } s.ReplicaInBytes = &value 
case float64: @@ -219,7 +220,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReplicaRejections", err) } s.ReplicaRejections = &value case float64: diff --git a/typedapi/types/privileges.go b/typedapi/types/privileges.go index d8e76f733b..59a9da2e3b 100644 --- a/typedapi/types/privileges.go +++ b/typedapi/types/privileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Privileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/types.ts#L48-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/types.ts#L48-L48 type Privileges map[string]bool diff --git a/typedapi/types/privilegesactions.go b/typedapi/types/privilegesactions.go index 0397270ad2..ea0ba24d92 100644 --- a/typedapi/types/privilegesactions.go +++ b/typedapi/types/privilegesactions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PrivilegesActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/put_privileges/types.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/put_privileges/types.ts#L22-L27 type PrivilegesActions struct { Actions []string `json:"actions"` Application *string `json:"application,omitempty"` @@ -55,13 +56,13 @@ func (s *PrivilegesActions) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "application": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Application", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,12 +73,12 @@ func (s *PrivilegesActions) UnmarshalJSON(data []byte) error { case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/privilegescheck.go b/typedapi/types/privilegescheck.go index f41aadeb51..5be188b54d 100644 --- a/typedapi/types/privilegescheck.go +++ b/typedapi/types/privilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
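The numeric fields above (`all_in_bytes`, `replica_rejections`, and so on) all follow one lenient pattern: the raw value is decoded into an `interface{}`, then handled either as a quoted string via `strconv` or as a bare JSON number, which `encoding/json` delivers as `float64`; only the `strconv` branch can fail, and that is where the new field-name wrapping applies. A minimal sketch of the pattern with a hypothetical `Counter` type:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// Counter is an illustrative type: its count may arrive as a JSON number
// or as a quoted numeric string, as in the generated stats types.
type Counter struct {
	Count int64 `json:"count"`
}

func (s *Counter) UnmarshalJSON(data []byte) error {
	var raw struct {
		Count json.RawMessage `json:"count"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw.Count == nil {
		return nil // field absent; leave the zero value
	}
	var v interface{}
	if err := json.Unmarshal(raw.Count, &v); err != nil {
		return fmt.Errorf("%s | %w", "Count", err)
	}
	switch v := v.(type) {
	case string:
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return fmt.Errorf("%s | %w", "Count", err)
		}
		s.Count = n
	case float64:
		// Bare JSON numbers are decoded into float64 by encoding/json.
		s.Count = int64(v)
	}
	return nil
}

func main() {
	var a, b Counter
	_ = json.Unmarshal([]byte(`{"count": 42}`), &a)
	_ = json.Unmarshal([]byte(`{"count": "42"}`), &b)
	fmt.Println(a.Count, b.Count) // 42 42
}
```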
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // PrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges_user_profile/types.ts#L30-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges_user_profile/types.ts#L30-L37 type PrivilegesCheck struct { Application []ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. diff --git a/typedapi/types/process.go b/typedapi/types/process.go index c34b1e89da..b413ddc899 100644 --- a/typedapi/types/process.go +++ b/typedapi/types/process.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Process type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L953-L975 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L953-L975 type Process struct { // Cpu Contains CPU statistics for the node. Cpu *Cpu `json:"cpu,omitempty"` @@ -64,7 +65,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case "cpu": if err := dec.Decode(&s.Cpu); err != nil { - return err + return fmt.Errorf("%s | %w", "Cpu", err) } case "max_file_descriptors": @@ -75,7 +76,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxFileDescriptors", err) } s.MaxFileDescriptors = &value case float64: @@ -85,7 +86,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case "mem": if err := dec.Decode(&s.Mem); err != nil { - return err + return fmt.Errorf("%s | %w", "Mem", err) } case "open_file_descriptors": @@ -96,7 +97,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OpenFileDescriptors", err) } s.OpenFileDescriptors = &value case float64: @@ -111,7 +112,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = &value case float64: diff --git a/typedapi/types/processor.go b/typedapi/types/processor.go index 9fd8cccc91..024f50c3d2 100644 --- a/typedapi/types/processor.go +++ b/typedapi/types/processor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Processor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L384-L401 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L384-L401 type Processor struct { // Count Number of documents transformed by the processor. Count *int64 `json:"count,omitempty"` @@ -64,7 +65,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -79,7 +80,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } s.Current = &value case float64: @@ -94,7 +95,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = &value case float64: @@ -104,7 +105,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { case "time_in_millis": if err := dec.Decode(&s.TimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInMillis", err) } } diff --git a/typedapi/types/processorcontainer.go b/typedapi/types/processorcontainer.go index 4cda1d4f6e..78a95d587f 100644 --- a/typedapi/types/processorcontainer.go +++ b/typedapi/types/processorcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ProcessorContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L27-L239 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L27-L239 type ProcessorContainer struct { // Append Appends one or more values to an existing array if the field already exists // and it is an array. diff --git a/typedapi/types/profile.go b/typedapi/types/profile.go index d5f1e22933..261d6d727d 100644 --- a/typedapi/types/profile.go +++ b/typedapi/types/profile.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Profile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L93-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L93-L95 type Profile struct { Shards []ShardProfile `json:"shards"` } diff --git a/typedapi/types/property.go b/typedapi/types/property.go index 38f00846fb..aa4c4f7bf0 100644 --- a/typedapi/types/property.go +++ b/typedapi/types/property.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -70,5 +70,5 @@ package types // IpRangeProperty // LongRangeProperty // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/Property.ts#L94-L158 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/Property.ts#L94-L158 type Property interface{} diff --git a/typedapi/types/publishedclusterstates.go b/typedapi/types/publishedclusterstates.go index f523312637..bfdc2001b3 100644 --- a/typedapi/types/publishedclusterstates.go +++ b/typedapi/types/publishedclusterstates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // PublishedClusterStates type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L263-L276 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L263-L276 type PublishedClusterStates struct { // CompatibleDiffs Number of compatible differences between published cluster states. CompatibleDiffs *int64 `json:"compatible_diffs,omitempty"` @@ -62,7 +63,7 @@ func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CompatibleDiffs", err) } s.CompatibleDiffs = &value case float64: @@ -77,7 +78,7 @@ func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FullStates", err) } s.FullStates = &value case float64: @@ -92,7 +93,7 @@ func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncompatibleDiffs", err) } s.IncompatibleDiffs = &value case float64: diff --git a/typedapi/types/queries.go b/typedapi/types/queries.go index da5b99382a..7cfbc15985 100644 --- a/typedapi/types/queries.go +++ b/typedapi/types/queries.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Queries type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L397-L399 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L401-L403 type Queries struct { Cache *CacheQueries `json:"cache,omitempty"` } diff --git a/typedapi/types/query.go b/typedapi/types/query.go index 723134cbe1..77d5ded833 100644 --- a/typedapi/types/query.go +++ b/typedapi/types/query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Query type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L100-L407 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L100-L407 type Query struct { // Bool matches documents matching boolean combinations of other queries. Bool *BoolQuery `json:"bool,omitempty"` @@ -210,17 +211,17 @@ func (s *Query) UnmarshalJSON(data []byte) error { case "bool": if err := dec.Decode(&s.Bool); err != nil { - return err + return fmt.Errorf("%s | %w", "Bool", err) } case "boosting": if err := dec.Decode(&s.Boosting); err != nil { - return err + return fmt.Errorf("%s | %w", "Boosting", err) } case "combined_fields": if err := dec.Decode(&s.CombinedFields); err != nil { - return err + return fmt.Errorf("%s | %w", "CombinedFields", err) } case "common": @@ -228,23 +229,23 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Common = make(map[string]CommonTermsQuery, 0) } if err := dec.Decode(&s.Common); err != nil { - return err + return fmt.Errorf("%s | %w", "Common", err) } case "constant_score": if err := dec.Decode(&s.ConstantScore); err != nil { - return err + return fmt.Errorf("%s | %w", "ConstantScore", err) } case "dis_max": if err := dec.Decode(&s.DisMax); err != nil { - return err + return fmt.Errorf("%s | %w", "DisMax", err) } case "distance_feature": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "DistanceFeature", err) } o := NewGeoDistanceFeatureQuery() err := json.Unmarshal(message, &o) @@ -252,23 +253,23 @@ func (s *Query) UnmarshalJSON(data []byte) error { o := NewDateDistanceFeatureQuery() err := json.Unmarshal(message, &o) if err != nil { - return err + return fmt.Errorf("%s | %w", "DistanceFeature", err) } } case "exists": if err := dec.Decode(&s.Exists); err != nil { - return err + return fmt.Errorf("%s | %w", "Exists", err) } case "field_masking_span": if err := dec.Decode(&s.FieldMaskingSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldMaskingSpan", err) } case "function_score": if err := dec.Decode(&s.FunctionScore); err != nil { - return err + return fmt.Errorf("%s | %w", "FunctionScore", err) } case 
"fuzzy": @@ -276,42 +277,42 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Fuzzy = make(map[string]FuzzyQuery, 0) } if err := dec.Decode(&s.Fuzzy); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzzy", err) } case "geo_bounding_box": if err := dec.Decode(&s.GeoBoundingBox); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoBoundingBox", err) } case "geo_distance": if err := dec.Decode(&s.GeoDistance); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoDistance", err) } case "geo_polygon": if err := dec.Decode(&s.GeoPolygon); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoPolygon", err) } case "geo_shape": if err := dec.Decode(&s.GeoShape); err != nil { - return err + return fmt.Errorf("%s | %w", "GeoShape", err) } case "has_child": if err := dec.Decode(&s.HasChild); err != nil { - return err + return fmt.Errorf("%s | %w", "HasChild", err) } case "has_parent": if err := dec.Decode(&s.HasParent); err != nil { - return err + return fmt.Errorf("%s | %w", "HasParent", err) } case "ids": if err := dec.Decode(&s.Ids); err != nil { - return err + return fmt.Errorf("%s | %w", "Ids", err) } case "intervals": @@ -319,12 +320,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Intervals = make(map[string]IntervalsQuery, 0) } if err := dec.Decode(&s.Intervals); err != nil { - return err + return fmt.Errorf("%s | %w", "Intervals", err) } case "knn": if err := dec.Decode(&s.Knn); err != nil { - return err + return fmt.Errorf("%s | %w", "Knn", err) } case "match": @@ -332,12 +333,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Match = make(map[string]MatchQuery, 0) } if err := dec.Decode(&s.Match); err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } case "match_all": if err := dec.Decode(&s.MatchAll); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchAll", err) } case "match_bool_prefix": @@ -345,12 +346,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.MatchBoolPrefix = make(map[string]MatchBoolPrefixQuery, 0) } if err := dec.Decode(&s.MatchBoolPrefix); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchBoolPrefix", err) } case "match_none": if err := dec.Decode(&s.MatchNone); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchNone", err) } case "match_phrase": @@ -358,7 +359,7 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.MatchPhrase = make(map[string]MatchPhraseQuery, 0) } if err := dec.Decode(&s.MatchPhrase); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchPhrase", err) } case "match_phrase_prefix": @@ -366,37 +367,37 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.MatchPhrasePrefix = make(map[string]MatchPhrasePrefixQuery, 0) } if err := dec.Decode(&s.MatchPhrasePrefix); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchPhrasePrefix", err) } case "more_like_this": if err := dec.Decode(&s.MoreLikeThis); err != nil { - return err + return fmt.Errorf("%s | %w", "MoreLikeThis", err) } case "multi_match": if err := dec.Decode(&s.MultiMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MultiMatch", err) } case "nested": if err := dec.Decode(&s.Nested); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested", err) } case "parent_id": if err := dec.Decode(&s.ParentId); err != nil { - return err + return fmt.Errorf("%s | %w", "ParentId", err) } case "percolate": if err := dec.Decode(&s.Percolate); err != nil { - return err + return fmt.Errorf("%s | %w", "Percolate", err) } case "pinned": if err := 
dec.Decode(&s.Pinned); err != nil { - return err + return fmt.Errorf("%s | %w", "Pinned", err) } case "prefix": @@ -404,12 +405,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Prefix = make(map[string]PrefixQuery, 0) } if err := dec.Decode(&s.Prefix); err != nil { - return err + return fmt.Errorf("%s | %w", "Prefix", err) } case "query_string": if err := dec.Decode(&s.QueryString); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryString", err) } case "range": @@ -417,12 +418,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Range = make(map[string]RangeQuery, 0) } if err := dec.Decode(&s.Range); err != nil { - return err + return fmt.Errorf("%s | %w", "Range", err) } case "rank_feature": if err := dec.Decode(&s.RankFeature); err != nil { - return err + return fmt.Errorf("%s | %w", "RankFeature", err) } case "regexp": @@ -430,62 +431,62 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Regexp = make(map[string]RegexpQuery, 0) } if err := dec.Decode(&s.Regexp); err != nil { - return err + return fmt.Errorf("%s | %w", "Regexp", err) } case "rule_query": if err := dec.Decode(&s.RuleQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "RuleQuery", err) } case "script": if err := dec.Decode(&s.Script); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } case "script_score": if err := dec.Decode(&s.ScriptScore); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptScore", err) } case "shape": if err := dec.Decode(&s.Shape); err != nil { - return err + return fmt.Errorf("%s | %w", "Shape", err) } case "simple_query_string": if err := dec.Decode(&s.SimpleQueryString); err != nil { - return err + return fmt.Errorf("%s | %w", "SimpleQueryString", err) } case "span_containing": if err := dec.Decode(&s.SpanContaining); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanContaining", err) } case "span_first": if err := dec.Decode(&s.SpanFirst); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanFirst", err) } case "span_multi": if err := dec.Decode(&s.SpanMulti); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanMulti", err) } case "span_near": if err := dec.Decode(&s.SpanNear); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanNear", err) } case "span_not": if err := dec.Decode(&s.SpanNot); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanNot", err) } case "span_or": if err := dec.Decode(&s.SpanOr); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanOr", err) } case "span_term": @@ -493,12 +494,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.SpanTerm = make(map[string]SpanTermQuery, 0) } if err := dec.Decode(&s.SpanTerm); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanTerm", err) } case "span_within": if err := dec.Decode(&s.SpanWithin); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanWithin", err) } case "term": @@ -506,12 +507,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Term = make(map[string]TermQuery, 0) } if err := dec.Decode(&s.Term); err != nil { - return err + return fmt.Errorf("%s | %w", "Term", err) } case "terms": if err := dec.Decode(&s.Terms); err != nil { - return err + return fmt.Errorf("%s | %w", "Terms", err) } case "terms_set": @@ -519,7 +520,7 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.TermsSet = make(map[string]TermsSetQuery, 0) } if err := dec.Decode(&s.TermsSet); err != nil { - return err + return fmt.Errorf("%s | %w", "TermsSet", err) } case "text_expansion": @@ 
-527,12 +528,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.TextExpansion = make(map[string]TextExpansionQuery, 0) } if err := dec.Decode(&s.TextExpansion); err != nil { - return err + return fmt.Errorf("%s | %w", "TextExpansion", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "weighted_tokens": @@ -540,7 +541,7 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.WeightedTokens = make(map[string]WeightedTokensQuery, 0) } if err := dec.Decode(&s.WeightedTokens); err != nil { - return err + return fmt.Errorf("%s | %w", "WeightedTokens", err) } case "wildcard": @@ -548,12 +549,12 @@ func (s *Query) UnmarshalJSON(data []byte) error { s.Wildcard = make(map[string]WildcardQuery, 0) } if err := dec.Decode(&s.Wildcard); err != nil { - return err + return fmt.Errorf("%s | %w", "Wildcard", err) } case "wrapper": if err := dec.Decode(&s.Wrapper); err != nil { - return err + return fmt.Errorf("%s | %w", "Wrapper", err) } } diff --git a/typedapi/types/querybreakdown.go b/typedapi/types/querybreakdown.go index 6414902ad2..3837bd4c7a 100644 --- a/typedapi/types/querybreakdown.go +++ b/typedapi/types/querybreakdown.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QueryBreakdown type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L97-L116 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L97-L116 type QueryBreakdown struct { Advance int64 `json:"advance"` AdvanceCount int64 `json:"advance_count"` @@ -74,7 +75,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Advance", err) } s.Advance = value case float64: @@ -89,7 +90,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AdvanceCount", err) } s.AdvanceCount = value case float64: @@ -104,7 +105,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildScorer", err) } s.BuildScorer = value case float64: @@ -119,7 +120,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BuildScorerCount", err) } s.BuildScorerCount = value case float64: @@ -134,7 +135,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ComputeMaxScore", err) } s.ComputeMaxScore = value case float64: @@ -149,7 +150,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", 
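The `Query` container above dispatches on each variant key, and map-valued variants such as `term`, `prefix`, or `wildcard` are allocated before being decoded into, with the same per-variant error prefix. A condensed sketch with a hypothetical single-variant `MiniQuery`; the explicit `make` mirrors the generated code, although `json.Unmarshal` would also allocate the map on its own:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// TermQuery is an illustrative stand-in for one keyed variant, e.g.
// {"term": {"user.id": {"value": "kimchy"}}}.
type TermQuery struct {
	Value interface{} `json:"value"`
}

// MiniQuery sketches the container pattern: inspect each top-level key and
// decode the matching variant, allocating map-valued variants first.
type MiniQuery struct {
	Term map[string]TermQuery `json:"term,omitempty"`
}

func (s *MiniQuery) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	for key, msg := range raw {
		switch key {
		case "term":
			if s.Term == nil {
				s.Term = make(map[string]TermQuery, 0)
			}
			if err := json.Unmarshal(msg, &s.Term); err != nil {
				return fmt.Errorf("%s | %w", "Term", err)
			}
		}
	}
	return nil
}

func main() {
	var q MiniQuery
	_ = json.Unmarshal([]byte(`{"term":{"user.id":{"value":"kimchy"}}}`), &q)
	fmt.Printf("%+v\n", q) // {Term:map[user.id:{Value:kimchy}]}
}
```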
"ComputeMaxScoreCount", err) } s.ComputeMaxScoreCount = value case float64: @@ -164,7 +165,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CreateWeight", err) } s.CreateWeight = value case float64: @@ -179,7 +180,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CreateWeightCount", err) } s.CreateWeightCount = value case float64: @@ -194,7 +195,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } s.Match = value case float64: @@ -209,7 +210,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MatchCount", err) } s.MatchCount = value case float64: @@ -224,7 +225,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NextDoc", err) } s.NextDoc = value case float64: @@ -239,7 +240,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NextDocCount", err) } s.NextDocCount = value case float64: @@ -254,7 +255,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } s.Score = value case float64: @@ -269,7 +270,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScoreCount", err) } s.ScoreCount = value case float64: @@ -284,7 +285,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SetMinCompetitiveScore", err) } s.SetMinCompetitiveScore = value case float64: @@ -299,7 +300,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SetMinCompetitiveScoreCount", err) } s.SetMinCompetitiveScoreCount = value case float64: @@ -314,7 +315,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShallowAdvance", err) } s.ShallowAdvance = value case float64: @@ -329,7 +330,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShallowAdvanceCount", err) } s.ShallowAdvanceCount = value case float64: diff --git a/typedapi/types/querycachestats.go b/typedapi/types/querycachestats.go index c9531d9dfd..e89a61ec00 100644 --- a/typedapi/types/querycachestats.go +++ b/typedapi/types/querycachestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QueryCacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L192-L226 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L192-L226 type QueryCacheStats struct { // CacheCount Total number of entries added to the query cache across all shards assigned // to selected nodes. @@ -81,7 +82,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheCount", err) } s.CacheCount = value case float64: @@ -97,7 +98,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheSize", err) } s.CacheSize = value case float64: @@ -113,7 +114,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = value case float64: @@ -129,7 +130,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "HitCount", err) } s.HitCount = value case float64: @@ -139,7 +140,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case "memory_size": if err := dec.Decode(&s.MemorySize); err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySize", err) } case "memory_size_in_bytes": @@ -149,7 +150,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySizeInBytes", err) } s.MemorySizeInBytes = value case float64: @@ -165,7 +166,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissCount", err) } s.MissCount = value case float64: @@ -181,7 +182,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCount", err) } s.TotalCount = value case float64: diff --git a/typedapi/types/queryprofile.go b/typedapi/types/queryprofile.go index a7f869101c..196cbad569 100644 --- a/typedapi/types/queryprofile.go +++ b/typedapi/types/queryprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QueryProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L118-L124 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L118-L124 type QueryProfile struct { Breakdown QueryBreakdown `json:"breakdown"` Children []QueryProfile `json:"children,omitempty"` @@ -56,18 +57,18 @@ func (s *QueryProfile) UnmarshalJSON(data []byte) error { case "breakdown": if err := dec.Decode(&s.Breakdown); err != nil { - return err + return fmt.Errorf("%s | %w", "Breakdown", err) } case "children": if err := dec.Decode(&s.Children); err != nil { - return err + return fmt.Errorf("%s | %w", "Children", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,13 +79,13 @@ func (s *QueryProfile) UnmarshalJSON(data []byte) error { case "time_in_nanos": if err := dec.Decode(&s.TimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInNanos", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/queryrule.go b/typedapi/types/queryrule.go index 25532ad023..e6157ab106 100644 --- a/typedapi/types/queryrule.go +++ b/typedapi/types/queryrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/queryruletype" @@ -31,7 +32,7 @@ import ( // QueryRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/_types/QueryRuleset.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/_types/QueryRuleset.ts#L37-L42 type QueryRule struct { Actions QueryRuleActions `json:"actions"` Criteria []QueryRuleCriteria `json:"criteria"` @@ -56,22 +57,22 @@ func (s *QueryRule) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "criteria": if err := dec.Decode(&s.Criteria); err != nil { - return err + return fmt.Errorf("%s | %w", "Criteria", err) } case "rule_id": if err := dec.Decode(&s.RuleId); err != nil { - return err + return fmt.Errorf("%s | %w", "RuleId", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/queryruleactions.go b/typedapi/types/queryruleactions.go index 59143817b4..33e4a24a64 100644 --- a/typedapi/types/queryruleactions.go +++ b/typedapi/types/queryruleactions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // QueryRuleActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/_types/QueryRuleset.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/_types/QueryRuleset.ts#L67-L70 type QueryRuleActions struct { Docs []PinnedDoc `json:"docs,omitempty"` Ids []string `json:"ids,omitempty"` diff --git a/typedapi/types/queryrulecriteria.go b/typedapi/types/queryrulecriteria.go index e6c082f54b..4b66e09002 100644 --- a/typedapi/types/queryrulecriteria.go +++ b/typedapi/types/queryrulecriteria.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // QueryRuleCriteria type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/_types/QueryRuleset.ts#L48-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/_types/QueryRuleset.ts#L48-L52 type QueryRuleCriteria struct { Metadata string `json:"metadata"` Type queryrulecriteriatype.QueryRuleCriteriaType `json:"type"` @@ -57,7 +58,7 @@ func (s *QueryRuleCriteria) UnmarshalJSON(data []byte) error { case "metadata": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,12 +69,12 @@ func (s *QueryRuleCriteria) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "values": if err := dec.Decode(&s.Values); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } } diff --git a/typedapi/types/queryruleset.go b/typedapi/types/queryruleset.go index bdd0be6166..d113af7878 100644 --- a/typedapi/types/queryruleset.go +++ b/typedapi/types/queryruleset.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // QueryRuleset type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/_types/QueryRuleset.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/_types/QueryRuleset.ts#L26-L35 type QueryRuleset struct { // Rules Rules associated with the query ruleset Rules []QueryRule `json:"rules"` @@ -54,12 +55,12 @@ func (s *QueryRuleset) UnmarshalJSON(data []byte) error { case "rules": if err := dec.Decode(&s.Rules); err != nil { - return err + return fmt.Errorf("%s | %w", "Rules", err) } case "ruleset_id": if err := dec.Decode(&s.RulesetId); err != nil { - return err + return fmt.Errorf("%s | %w", "RulesetId", err) } } diff --git a/typedapi/types/queryrulesetlistitem.go b/typedapi/types/queryrulesetlistitem.go index 1498ef154f..552065fe6c 100644 --- a/typedapi/types/queryrulesetlistitem.go +++ b/typedapi/types/queryrulesetlistitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QueryRulesetListItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/query_ruleset/list/types.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/query_ruleset/list/types.ts#L22-L31 type QueryRulesetListItem struct { // RulesCount The number of rules associated with this ruleset RulesCount int `json:"rules_count"` @@ -61,7 +62,7 @@ func (s *QueryRulesetListItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RulesCount", err) } s.RulesCount = value case float64: @@ -71,7 +72,7 @@ func (s *QueryRulesetListItem) UnmarshalJSON(data []byte) error { case "ruleset_id": if err := dec.Decode(&s.RulesetId); err != nil { - return err + return fmt.Errorf("%s | %w", "RulesetId", err) } } diff --git a/typedapi/types/querystringquery.go b/typedapi/types/querystringquery.go index 36014c2397..9220724d05 100644 --- a/typedapi/types/querystringquery.go +++ b/typedapi/types/querystringquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // QueryStringQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L580-L700 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L580-L700 type QueryStringQuery struct { // AllowLeadingWildcard If `true`, the wildcard characters `*` and `?` are allowed as the first // character of the query string. 
@@ -129,7 +130,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowLeadingWildcard", err) } s.AllowLeadingWildcard = &value case bool: @@ -143,7 +144,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyzeWildcard", err) } s.AnalyzeWildcard = &value case bool: @@ -153,7 +154,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -169,7 +170,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AutoGenerateSynonymsPhraseQuery", err) } s.AutoGenerateSynonymsPhraseQuery = &value case bool: @@ -183,7 +184,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -194,12 +195,12 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "default_field": if err := dec.Decode(&s.DefaultField); err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultField", err) } case "default_operator": if err := dec.Decode(&s.DefaultOperator); err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultOperator", err) } case "enable_position_increments": @@ -209,7 +210,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EnablePositionIncrements", err) } s.EnablePositionIncrements = &value case bool: @@ -223,7 +224,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Escape", err) } s.Escape = &value case bool: @@ -232,12 +233,12 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "fuzzy_max_expansions": @@ -248,7 +249,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyMaxExpansions", err) } s.FuzzyMaxExpansions = &value case float64: @@ -264,7 +265,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyPrefixLength", err) } s.FuzzyPrefixLength = &value case float64: @@ -274,7 +275,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "fuzzy_rewrite": if err := dec.Decode(&s.FuzzyRewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyRewrite", err) } case "fuzzy_transpositions": @@ -284,7 +285,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return 
fmt.Errorf("%s | %w", "FuzzyTranspositions", err) } s.FuzzyTranspositions = &value case bool: @@ -298,7 +299,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } s.Lenient = &value case bool: @@ -313,7 +314,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDeterminizedStates", err) } s.MaxDeterminizedStates = &value case float64: @@ -323,7 +324,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "phrase_slop": @@ -333,7 +334,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PhraseSlop", err) } f := Float64(value) s.PhraseSlop = &f @@ -345,7 +346,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -357,7 +358,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -369,7 +370,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "quote_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QuoteAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -381,7 +382,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "quote_field_suffix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QuoteFieldSuffix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -392,7 +393,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "rewrite": if err := dec.Decode(&s.Rewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "Rewrite", err) } case "tie_breaker": @@ -402,7 +403,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TieBreaker", err) } f := Float64(value) s.TieBreaker = &f @@ -413,12 +414,12 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/queryvectorbuilder.go b/typedapi/types/queryvectorbuilder.go index 237ae29dbd..6fc82d5715 100644 --- a/typedapi/types/queryvectorbuilder.go +++ b/typedapi/types/queryvectorbuilder.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // QueryVectorBuilder type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Knn.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Knn.ts#L51-L54 type QueryVectorBuilder struct { TextEmbedding *TextEmbedding `json:"text_embedding,omitempty"` } diff --git a/typedapi/types/querywatch.go b/typedapi/types/querywatch.go index 4e0bf4c788..bc7e991413 100644 --- a/typedapi/types/querywatch.go +++ b/typedapi/types/querywatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QueryWatch type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Watch.ts#L58-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Watch.ts#L58-L64 type QueryWatch struct { Id_ string `json:"_id"` PrimaryTerm_ *int `json:"_primary_term,omitempty"` @@ -56,7 +57,7 @@ func (s *QueryWatch) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_primary_term": @@ -67,7 +68,7 @@ func (s *QueryWatch) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = &value case float64: @@ -77,17 +78,17 @@ func (s *QueryWatch) UnmarshalJSON(data []byte) error { case "_seq_no": if err := dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "watch": if err := dec.Decode(&s.Watch); err != nil { - return err + return fmt.Errorf("%s | %w", "Watch", err) } } diff --git a/typedapi/types/questionansweringinferenceoptions.go b/typedapi/types/questionansweringinferenceoptions.go index e31ee62180..eac95e35a5 100644 --- a/typedapi/types/questionansweringinferenceoptions.go +++ b/typedapi/types/questionansweringinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QuestionAnsweringInferenceOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L282-L292 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L282-L292 type QuestionAnsweringInferenceOptions struct { // MaxAnswerLength The maximum answer length to consider MaxAnswerLength *int `json:"max_answer_length,omitempty"` @@ -66,7 +67,7 @@ func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAnswerLength", err) } s.MaxAnswerLength = &value case float64: @@ -82,7 +83,7 @@ func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -93,7 +94,7 @@ func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/questionansweringinferenceupdateoptions.go b/typedapi/types/questionansweringinferenceupdateoptions.go index e664f9aa98..689310d9ee 100644 --- a/typedapi/types/questionansweringinferenceupdateoptions.go +++ b/typedapi/types/questionansweringinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // QuestionAnsweringInferenceUpdateOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L420-L431 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L420-L431 type QuestionAnsweringInferenceUpdateOptions struct { // MaxAnswerLength The maximum answer length to consider for extraction MaxAnswerLength *int `json:"max_answer_length,omitempty"` @@ -68,7 +69,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAnswerLength", err) } s.MaxAnswerLength = &value case float64: @@ -84,7 +85,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -95,7 +96,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case "question": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Question", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/randomscorefunction.go b/typedapi/types/randomscorefunction.go index c008d72dc5..ad50d14e69 100644 --- a/typedapi/types/randomscorefunction.go +++ b/typedapi/types/randomscorefunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RandomScoreFunction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L127-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L127-L130 type RandomScoreFunction struct { Field *string `json:"field,omitempty"` Seed string `json:"seed,omitempty"` @@ -53,13 +54,13 @@ func (s *RandomScoreFunction) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "seed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Seed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/rangeaggregate.go b/typedapi/types/rangeaggregate.go index 092a15cbf6..aeeaa7212a 100644 --- a/typedapi/types/rangeaggregate.go +++ b/typedapi/types/rangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L531-L532 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L531-L532 type RangeAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *RangeAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]RangeBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []RangeBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/rangeaggregation.go b/typedapi/types/rangeaggregation.go index c05376df6c..217a75a324 100644 --- a/typedapi/types/rangeaggregation.go +++ b/typedapi/types/rangeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L650-L670 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L650-L670 type RangeAggregation struct { // Field The date field whose values are use to build ranges. 
Field *string `json:"field,omitempty"` @@ -65,13 +66,13 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,7 +88,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Keyed", err) } s.Keyed = &value case bool: @@ -96,7 +97,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "missing": @@ -107,7 +108,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } s.Missing = &value case float64: @@ -118,7 +119,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,13 +130,13 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { case "ranges": if err := dec.Decode(&s.Ranges); err != nil { - return err + return fmt.Errorf("%s | %w", "Ranges", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -144,7 +145,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -153,7 +154,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -161,7 +162,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/rangebucket.go b/typedapi/types/rangebucket.go index c9d4d5774f..f72a9401c3 100644 --- a/typedapi/types/rangebucket.go +++ b/typedapi/types/rangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // RangeBucket type. 
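Note: RangeAggregation's script field above is decoded as a union; the generated code peeks at the object's keys to choose between an inline script and a stored-script reference, and every failure along the way is now wrapped with the "Script" field name. A rough sketch of that peek-and-branch approach, using simplified illustrative types rather than the real InlineScript/StoredScriptId:

package main

import (
    "bytes"
    "encoding/json"
    "errors"
    "fmt"
    "io"
)

// Illustrative stand-ins for the generated script variants.
type inlineScript struct {
    Source string `json:"source"`
}
type storedScriptID struct {
    ID string `json:"id"`
}

// decodeScript peeks at the object's keys to pick a variant, loosely
// mirroring the keyDec/localDec pattern in the generated code.
func decodeScript(message json.RawMessage) (interface{}, error) {
    keyDec := json.NewDecoder(bytes.NewReader(message))
    for {
        t, err := keyDec.Token()
        if err != nil {
            if errors.Is(err, io.EOF) {
                break
            }
            return nil, fmt.Errorf("%s | %w", "Script", err)
        }
        switch t {
        case "source", "lang", "options":
            var o inlineScript
            if err := json.Unmarshal(message, &o); err != nil {
                return nil, fmt.Errorf("%s | %w", "Script", err)
            }
            return o, nil
        case "id":
            var o storedScriptID
            if err := json.Unmarshal(message, &o); err != nil {
                return nil, fmt.Errorf("%s | %w", "Script", err)
            }
            return o, nil
        }
    }
    return nil, nil
}

func main() {
    s, err := decodeScript(json.RawMessage(`{"id": "my-stored-script"}`))
    fmt.Printf("%#v %v\n", s, err) // main.storedScriptID{ID:"my-stored-script"} <nil>
}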
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L534-L541 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L534-L541 type RangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -66,7 +66,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -81,7 +81,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } f := Float64(value) s.From = &f @@ -93,7 +93,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case "from_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FromAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +105,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,7 +121,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } f := Float64(value) s.To = &f @@ -133,7 +133,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case "to_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ToAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -156,490 +156,490 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | 
%w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := 
NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -649,7 +649,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/rangequery.go b/typedapi/types/rangequery.go index cf487e5665..ffbf58c2fa 100644 --- a/typedapi/types/rangequery.go +++ b/typedapi/types/rangequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // DateRangeQuery // NumberRangeQuery // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L166-L168 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L166-L168 type RangeQuery interface{} diff --git a/typedapi/types/rankcontainer.go b/typedapi/types/rankcontainer.go index f924ab43c0..be7251a6e9 100644 --- a/typedapi/types/rankcontainer.go +++ b/typedapi/types/rankcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RankContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Rank.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Rank.ts#L22-L28 type RankContainer struct { // Rrf The reciprocal rank fusion parameters Rrf *RrfRank `json:"rrf,omitempty"` diff --git a/typedapi/types/rankevalhit.go b/typedapi/types/rankevalhit.go index 372ce523ec..f22b93017f 100644 --- a/typedapi/types/rankevalhit.go +++ b/typedapi/types/rankevalhit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
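Note: the bulk of the RangeBucket changes sit inside the switch that dispatches sub-aggregations by their typed response key (the typed_keys format "type#name"): the prefix selects the concrete aggregate type, the suffix becomes the map key, and each decode error is now wrapped with "Aggregations". A hedged sketch of that dispatch pattern with a single, illustrative aggregate type (not the generated implementation):

package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

// Simplified stand-in for a generated aggregate type (illustrative only).
type maxAggregate struct {
    Value float64 `json:"value"`
}

// decodeAggregations sketches the typed-key dispatch used by the generated
// bucket types: keys look like "max#highest_price", the prefix picks the Go
// type, the suffix is the user-facing aggregation name.
func decodeAggregations(raw map[string]json.RawMessage) (map[string]interface{}, error) {
    out := make(map[string]interface{}, len(raw))
    for key, msg := range raw {
        elems := strings.SplitN(key, "#", 2)
        if len(elems) != 2 {
            continue // untyped keys are handled generically in the real code
        }
        switch elems[0] {
        case "max":
            var o maxAggregate
            if err := json.Unmarshal(msg, &o); err != nil {
                // Same wrapping idiom as the diff: field name first, cause preserved.
                return nil, fmt.Errorf("%s | %w", "Aggregations", err)
            }
            out[elems[1]] = o
        default:
            var o map[string]interface{}
            if err := json.Unmarshal(msg, &o); err != nil {
                return nil, fmt.Errorf("%s | %w", "Aggregations", err)
            }
            out[elems[1]] = o
        }
    }
    return out, nil
}

func main() {
    raw := map[string]json.RawMessage{
        "max#highest_price": json.RawMessage(`{"value": 199.0}`),
    }
    aggs, err := decodeAggregations(raw)
    fmt.Println(aggs, err) // map[highest_price:{199}] <nil>
}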
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalHit type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L141-L145 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L141-L145 type RankEvalHit struct { Id_ string `json:"_id"` Index_ string `json:"_index"` @@ -54,12 +55,12 @@ func (s *RankEvalHit) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "_score": @@ -69,7 +70,7 @@ func (s *RankEvalHit) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score_", err) } f := Float64(value) s.Score_ = f diff --git a/typedapi/types/rankevalhititem.go b/typedapi/types/rankevalhititem.go index f0b8a02b0b..853048a391 100644 --- a/typedapi/types/rankevalhititem.go +++ b/typedapi/types/rankevalhititem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RankEvalHitItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L136-L139 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L136-L139 type RankEvalHitItem struct { Hit RankEvalHit `json:"hit"` Rating Float64 `json:"rating,omitempty"` @@ -52,12 +53,12 @@ func (s *RankEvalHitItem) UnmarshalJSON(data []byte) error { case "hit": if err := dec.Decode(&s.Hit); err != nil { - return err + return fmt.Errorf("%s | %w", "Hit", err) } case "rating": if err := dec.Decode(&s.Rating); err != nil { - return err + return fmt.Errorf("%s | %w", "Rating", err) } } diff --git a/typedapi/types/rankevalmetric.go b/typedapi/types/rankevalmetric.go index 0edea777e9..5a6259789f 100644 --- a/typedapi/types/rankevalmetric.go +++ b/typedapi/types/rankevalmetric.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RankEvalMetric type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L90-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L90-L96 type RankEvalMetric struct { Dcg *RankEvalMetricDiscountedCumulativeGain `json:"dcg,omitempty"` ExpectedReciprocalRank *RankEvalMetricExpectedReciprocalRank `json:"expected_reciprocal_rank,omitempty"` diff --git a/typedapi/types/rankevalmetricdetail.go b/typedapi/types/rankevalmetricdetail.go index 107bf330fd..99dbc856b0 100644 --- a/typedapi/types/rankevalmetricdetail.go +++ b/typedapi/types/rankevalmetricdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L125-L134 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L125-L134 type RankEvalMetricDetail struct { // Hits The hits section shows a grouping of the search results with their supplied // ratings @@ -65,7 +66,7 @@ func (s *RankEvalMetricDetail) UnmarshalJSON(data []byte) error { case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "metric_details": @@ -73,7 +74,7 @@ func (s *RankEvalMetricDetail) UnmarshalJSON(data []byte) error { s.MetricDetails = make(map[string]map[string]json.RawMessage, 0) } if err := dec.Decode(&s.MetricDetails); err != nil { - return err + return fmt.Errorf("%s | %w", "MetricDetails", err) } case "metric_score": @@ -83,7 +84,7 @@ func (s *RankEvalMetricDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MetricScore", err) } f := Float64(value) s.MetricScore = f @@ -94,7 +95,7 @@ func (s *RankEvalMetricDetail) UnmarshalJSON(data []byte) error { case "unrated_docs": if err := dec.Decode(&s.UnratedDocs); err != nil { - return err + return fmt.Errorf("%s | %w", "UnratedDocs", err) } } diff --git a/typedapi/types/rankevalmetricdiscountedcumulativegain.go b/typedapi/types/rankevalmetricdiscountedcumulativegain.go index 3afe8e1c2f..5b68e93758 100644 --- a/typedapi/types/rankevalmetricdiscountedcumulativegain.go +++ b/typedapi/types/rankevalmetricdiscountedcumulativegain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricDiscountedCumulativeGain type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L66-L77 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L66-L77 type RankEvalMetricDiscountedCumulativeGain struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -62,7 +63,7 @@ func (s *RankEvalMetricDiscountedCumulativeGain) UnmarshalJSON(data []byte) erro case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = &value case float64: @@ -77,7 +78,7 @@ func (s *RankEvalMetricDiscountedCumulativeGain) UnmarshalJSON(data []byte) erro case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Normalize", err) } s.Normalize = &value case bool: diff --git a/typedapi/types/rankevalmetricexpectedreciprocalrank.go b/typedapi/types/rankevalmetricexpectedreciprocalrank.go index 14bb34e24c..f16857cfbb 100644 --- a/typedapi/types/rankevalmetricexpectedreciprocalrank.go +++ b/typedapi/types/rankevalmetricexpectedreciprocalrank.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricExpectedReciprocalRank type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L79-L88 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L79-L88 type RankEvalMetricExpectedReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -62,7 +63,7 @@ func (s *RankEvalMetricExpectedReciprocalRank) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = &value case float64: @@ -78,7 +79,7 @@ func (s *RankEvalMetricExpectedReciprocalRank) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaximumRelevance", err) } s.MaximumRelevance = value case float64: diff --git a/typedapi/types/rankevalmetricmeanreciprocalrank.go b/typedapi/types/rankevalmetricmeanreciprocalrank.go index 7da204bd58..606c77b27c 100644 --- a/typedapi/types/rankevalmetricmeanreciprocalrank.go +++ b/typedapi/types/rankevalmetricmeanreciprocalrank.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricMeanReciprocalRank type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L60-L64 type RankEvalMetricMeanReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -63,7 +64,7 @@ func (s *RankEvalMetricMeanReciprocalRank) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = &value case float64: @@ -79,7 +80,7 @@ func (s *RankEvalMetricMeanReciprocalRank) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelevantRatingThreshold", err) } s.RelevantRatingThreshold = &value case float64: diff --git a/typedapi/types/rankevalmetricprecision.go b/typedapi/types/rankevalmetricprecision.go index 36af313923..a4eb5f1677 100644 --- a/typedapi/types/rankevalmetricprecision.go +++ b/typedapi/types/rankevalmetricprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricPrecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L42-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L42-L52 type RankEvalMetricPrecision struct { // IgnoreUnlabeled Controls how unlabeled documents in the search results are counted. If set to // true, unlabeled documents are ignored and neither count as relevant or @@ -66,7 +67,7 @@ func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnlabeled", err) } s.IgnoreUnlabeled = &value case bool: @@ -81,7 +82,7 @@ func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = &value case float64: @@ -97,7 +98,7 @@ func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelevantRatingThreshold", err) } s.RelevantRatingThreshold = &value case float64: diff --git a/typedapi/types/rankevalmetricratingtreshold.go b/typedapi/types/rankevalmetricratingtreshold.go index ebe393db5c..b0cf9eb813 100644 --- a/typedapi/types/rankevalmetricratingtreshold.go +++ b/typedapi/types/rankevalmetricratingtreshold.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricRatingTreshold type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L34-L40 type RankEvalMetricRatingTreshold struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -63,7 +64,7 @@ func (s *RankEvalMetricRatingTreshold) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = &value case float64: @@ -79,7 +80,7 @@ func (s *RankEvalMetricRatingTreshold) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelevantRatingThreshold", err) } s.RelevantRatingThreshold = &value case float64: diff --git a/typedapi/types/rankevalmetricrecall.go b/typedapi/types/rankevalmetricrecall.go index 916c56350b..962c5461e2 100644 --- a/typedapi/types/rankevalmetricrecall.go +++ b/typedapi/types/rankevalmetricrecall.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalMetricRecall type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L54-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L54-L58 type RankEvalMetricRecall struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -63,7 +64,7 @@ func (s *RankEvalMetricRecall) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "K", err) } s.K = &value case float64: @@ -79,7 +80,7 @@ func (s *RankEvalMetricRecall) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelevantRatingThreshold", err) } s.RelevantRatingThreshold = &value case float64: diff --git a/typedapi/types/rankevalquery.go b/typedapi/types/rankevalquery.go index d94d60a855..b1099e62ec 100644 --- a/typedapi/types/rankevalquery.go +++ b/typedapi/types/rankevalquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankEvalQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L111-L114 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L111-L114 type RankEvalQuery struct { Query Query `json:"query"` Size *int `json:"size,omitempty"` @@ -53,7 +54,7 @@ func (s *RankEvalQuery) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "size": @@ -64,7 +65,7 @@ func (s *RankEvalQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/rankevalrequestitem.go b/typedapi/types/rankevalrequestitem.go index 18ab2b997f..5c51e9657a 100644 --- a/typedapi/types/rankevalrequestitem.go +++ b/typedapi/types/rankevalrequestitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RankEvalRequestItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L98-L109 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L98-L109 type RankEvalRequestItem struct { // Id The search request’s ID, used to group result details later. Id string `json:"id"` @@ -60,7 +61,7 @@ func (s *RankEvalRequestItem) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "params": @@ -68,22 +69,22 @@ func (s *RankEvalRequestItem) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "ratings": if err := dec.Decode(&s.Ratings); err != nil { - return err + return fmt.Errorf("%s | %w", "Ratings", err) } case "request": if err := dec.Decode(&s.Request); err != nil { - return err + return fmt.Errorf("%s | %w", "Request", err) } case "template_id": if err := dec.Decode(&s.TemplateId); err != nil { - return err + return fmt.Errorf("%s | %w", "TemplateId", err) } } diff --git a/typedapi/types/rankfeaturefunction.go b/typedapi/types/rankfeaturefunction.go index a6244d4f98..01c8719fd4 100644 --- a/typedapi/types/rankfeaturefunction.go +++ b/typedapi/types/rankfeaturefunction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RankFeatureFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L264-L264 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L264-L264 type RankFeatureFunction struct { } diff --git a/typedapi/types/rankfeaturefunctionlinear.go b/typedapi/types/rankfeaturefunctionlinear.go index e0fd1cd697..ea5644fe41 100644 --- a/typedapi/types/rankfeaturefunctionlinear.go +++ b/typedapi/types/rankfeaturefunctionlinear.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RankFeatureFunctionLinear type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L266-L266 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L266-L266 type RankFeatureFunctionLinear struct { } diff --git a/typedapi/types/rankfeaturefunctionlogarithm.go b/typedapi/types/rankfeaturefunctionlogarithm.go index 55a3666c46..9725349855 100644 --- a/typedapi/types/rankfeaturefunctionlogarithm.go +++ b/typedapi/types/rankfeaturefunctionlogarithm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankFeatureFunctionLogarithm type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L268-L273 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L268-L273 type RankFeatureFunctionLogarithm struct { // ScalingFactor Configurable scaling factor. ScalingFactor float32 `json:"scaling_factor"` @@ -58,7 +59,7 @@ func (s *RankFeatureFunctionLogarithm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScalingFactor", err) } f := float32(value) s.ScalingFactor = f diff --git a/typedapi/types/rankfeaturefunctionsaturation.go b/typedapi/types/rankfeaturefunctionsaturation.go index 4335668de3..c0148f50ef 100644 --- a/typedapi/types/rankfeaturefunctionsaturation.go +++ b/typedapi/types/rankfeaturefunctionsaturation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankFeatureFunctionSaturation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L275-L280 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L275-L280 type RankFeatureFunctionSaturation struct { // Pivot Configurable pivot value so that the result will be less than 0.5. Pivot *float32 `json:"pivot,omitempty"` @@ -58,7 +59,7 @@ func (s *RankFeatureFunctionSaturation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } f := float32(value) s.Pivot = &f diff --git a/typedapi/types/rankfeaturefunctionsigmoid.go b/typedapi/types/rankfeaturefunctionsigmoid.go index d0a32783d3..45d5265965 100644 --- a/typedapi/types/rankfeaturefunctionsigmoid.go +++ b/typedapi/types/rankfeaturefunctionsigmoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankFeatureFunctionSigmoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L282-L291 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L282-L291 type RankFeatureFunctionSigmoid struct { // Exponent Configurable Exponent. Exponent float32 `json:"exponent"` @@ -60,7 +61,7 @@ func (s *RankFeatureFunctionSigmoid) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Exponent", err) } f := float32(value) s.Exponent = f @@ -76,7 +77,7 @@ func (s *RankFeatureFunctionSigmoid) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } f := float32(value) s.Pivot = f diff --git a/typedapi/types/rankfeatureproperty.go b/typedapi/types/rankfeatureproperty.go index 8153c170de..409194b697 100644 --- a/typedapi/types/rankfeatureproperty.go +++ b/typedapi/types/rankfeatureproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // RankFeatureProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L184-L187 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L184-L187 type RankFeatureProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -61,7 +62,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -379,7 +380,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -392,7 +393,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "positive_score_impact": @@ -402,7 +403,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositiveScoreImpact", err) } s.PositiveScoreImpact = &value case bool: @@ -718,7 +719,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/rankfeaturequery.go b/typedapi/types/rankfeaturequery.go index 5f9804b870..2ced329653 100644 --- a/typedapi/types/rankfeaturequery.go +++ b/typedapi/types/rankfeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RankFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L293-L316 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L293-L316 type RankFeatureQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -77,7 +78,7 @@ func (s *RankFeatureQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -88,23 +89,23 @@ func (s *RankFeatureQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "linear": if err := dec.Decode(&s.Linear); err != nil { - return err + return fmt.Errorf("%s | %w", "Linear", err) } case "log": if err := dec.Decode(&s.Log); err != nil { - return err + return fmt.Errorf("%s | %w", "Log", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,12 +116,12 @@ func (s *RankFeatureQuery) UnmarshalJSON(data []byte) error { case "saturation": if err := dec.Decode(&s.Saturation); err != nil { - return err + return fmt.Errorf("%s | %w", "Saturation", err) } case "sigmoid": if err := dec.Decode(&s.Sigmoid); err != nil { - return err + return fmt.Errorf("%s | %w", "Sigmoid", err) } } diff --git a/typedapi/types/rankfeaturesproperty.go b/typedapi/types/rankfeaturesproperty.go index ead5ab1b1e..fe2e2462c0 100644 --- a/typedapi/types/rankfeaturesproperty.go +++ b/typedapi/types/rankfeaturesproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,15 +33,16 @@ import ( // RankFeaturesProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L189-L191 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L189-L192 type RankFeaturesProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. 
- Meta map[string]string `json:"meta,omitempty"` - Properties map[string]Property `json:"properties,omitempty"` - Type string `json:"type,omitempty"` + Meta map[string]string `json:"meta,omitempty"` + PositiveScoreImpact *bool `json:"positive_score_impact,omitempty"` + Properties map[string]Property `json:"properties,omitempty"` + Type string `json:"type,omitempty"` } func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { @@ -60,7 +62,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -378,7 +380,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -391,7 +393,21 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) + } + + case "positive_score_impact": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "PositiveScoreImpact", err) + } + s.PositiveScoreImpact = &value + case bool: + s.PositiveScoreImpact = &v } case "properties": @@ -703,7 +719,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } @@ -715,12 +731,13 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { func (s RankFeaturesProperty) MarshalJSON() ([]byte, error) { type innerRankFeaturesProperty RankFeaturesProperty tmp := innerRankFeaturesProperty{ - Dynamic: s.Dynamic, - Fields: s.Fields, - IgnoreAbove: s.IgnoreAbove, - Meta: s.Meta, - Properties: s.Properties, - Type: s.Type, + Dynamic: s.Dynamic, + Fields: s.Fields, + IgnoreAbove: s.IgnoreAbove, + Meta: s.Meta, + PositiveScoreImpact: s.PositiveScoreImpact, + Properties: s.Properties, + Type: s.Type, } tmp.Type = "rank_features" diff --git a/typedapi/types/raretermsaggregation.go b/typedapi/types/raretermsaggregation.go index 5e0f0904c1..ce03786158 100644 --- a/typedapi/types/raretermsaggregation.go +++ b/typedapi/types/raretermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RareTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L687-L717 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L687-L717 type RareTermsAggregation struct { // Exclude Terms that should be excluded from the aggregation. 
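The `rank_features` property now exposes `positive_score_impact`, and the added switch decodes it leniently: a JSON boolean or its quoted string form both land in the same pointer field, while marshalling keeps pinning `"type":"rank_features"`. A small sketch under the same assumed import path (the mapping fragments are invented):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both forms decode to the same value thanks to the string/bool switch.
	for _, doc := range []string{
		`{"type":"rank_features","positive_score_impact":false}`,
		`{"type":"rank_features","positive_score_impact":"false"}`,
	} {
		var p types.RankFeaturesProperty
		if err := json.Unmarshal([]byte(doc), &p); err != nil {
			panic(err)
		}
		fmt.Println(*p.PositiveScoreImpact) // false, twice
	}

	// The MarshalJSON override still forces the literal mapping type.
	out, _ := json.Marshal(types.RankFeaturesProperty{})
	fmt.Println(string(out)) // {"type":"rank_features"}
}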
Exclude []string `json:"exclude,omitempty"` @@ -72,24 +73,24 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } s.Exclude = append(s.Exclude, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } case "max_doc_count": @@ -99,7 +100,7 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocCount", err) } s.MaxDocCount = &value case float64: @@ -109,18 +110,18 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -136,7 +137,7 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } f := Float64(value) s.Precision = &f @@ -148,7 +149,7 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { case "value_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/rateaggregate.go b/typedapi/types/rateaggregate.go index 5fbef132de..0d6b93db5f 100644 --- a/typedapi/types/rateaggregate.go +++ b/typedapi/types/rateaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RateAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L741-L745 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L741-L745 type RateAggregate struct { Meta Metadata `json:"meta,omitempty"` Value Float64 `json:"value"` @@ -54,7 +55,7 @@ func (s *RateAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": @@ -64,7 +65,7 @@ func (s *RateAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := Float64(value) s.Value = f @@ -76,7 +77,7 @@ func (s *RateAggregate) UnmarshalJSON(data []byte) error { case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/rateaggregation.go b/typedapi/types/rateaggregation.go index 0debcba385..e9d45960b5 100644 --- a/typedapi/types/rateaggregation.go +++ b/typedapi/types/rateaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // RateAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L230-L241 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L230-L241 type RateAggregation struct { // Field The field on which to run the aggregation. 
Field *string `json:"field,omitempty"` @@ -66,13 +67,13 @@ func (s *RateAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,18 +84,18 @@ func (s *RateAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -103,7 +104,7 @@ func (s *RateAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -112,7 +113,7 @@ func (s *RateAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -120,7 +121,7 @@ func (s *RateAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -129,7 +130,7 @@ func (s *RateAggregation) UnmarshalJSON(data []byte) error { case "unit": if err := dec.Decode(&s.Unit); err != nil { - return err + return fmt.Errorf("%s | %w", "Unit", err) } } diff --git a/typedapi/types/readexception.go b/typedapi/types/readexception.go index 1dc2e8e7e2..87c8663c1d 100644 --- a/typedapi/types/readexception.go +++ b/typedapi/types/readexception.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReadException type. 
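The `script` handling in `RateAggregation` above buffers the raw object and peeks at its keys to pick a variant: `lang`, `options`, or `source` selects an inline script, `id` a stored one. A hedged sketch of what that looks like from the caller's side (aggregation bodies are invented; import path assumed as before):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	inline := `{"field":"price","script":{"source":"doc['price'].value"}}`
	stored := `{"field":"price","script":{"id":"my-stored-script"}}`

	for _, doc := range []string{inline, stored} {
		var agg types.RateAggregation
		if err := json.Unmarshal([]byte(doc), &agg); err != nil {
			panic(err)
		}
		// The concrete type reflects which keys were present in the script object.
		fmt.Printf("%T\n", agg.Script) // *types.InlineScript, then *types.StoredScriptId
	}
}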
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ccr/_types/FollowIndexStats.ts#L71-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ccr/_types/FollowIndexStats.ts#L71-L75 type ReadException struct { Exception ErrorCause `json:"exception"` FromSeqNo int64 `json:"from_seq_no"` @@ -54,12 +55,12 @@ func (s *ReadException) UnmarshalJSON(data []byte) error { case "exception": if err := dec.Decode(&s.Exception); err != nil { - return err + return fmt.Errorf("%s | %w", "Exception", err) } case "from_seq_no": if err := dec.Decode(&s.FromSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "FromSeqNo", err) } case "retries": @@ -70,7 +71,7 @@ func (s *ReadException) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Retries", err) } s.Retries = value case float64: diff --git a/typedapi/types/readonlyurlrepository.go b/typedapi/types/readonlyurlrepository.go new file mode 100644 index 0000000000..61717502de --- /dev/null +++ b/typedapi/types/readonlyurlrepository.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// ReadOnlyUrlRepository type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L60-L63 +type ReadOnlyUrlRepository struct { + Settings ReadOnlyUrlRepositorySettings `json:"settings"` + Type string `json:"type,omitempty"` + Uuid *string `json:"uuid,omitempty"` +} + +func (s *ReadOnlyUrlRepository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return fmt.Errorf("%s | %w", "Settings", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return fmt.Errorf("%s | %w", "Uuid", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s ReadOnlyUrlRepository) MarshalJSON() ([]byte, error) { + type innerReadOnlyUrlRepository ReadOnlyUrlRepository + tmp := innerReadOnlyUrlRepository{ + Settings: s.Settings, + Type: s.Type, + Uuid: s.Uuid, + } + + tmp.Type = "url" + + return json.Marshal(tmp) +} + +// NewReadOnlyUrlRepository returns a ReadOnlyUrlRepository. +func NewReadOnlyUrlRepository() *ReadOnlyUrlRepository { + r := &ReadOnlyUrlRepository{} + + return r +} diff --git a/typedapi/types/readonlyurlrepositorysettings.go b/typedapi/types/readonlyurlrepositorysettings.go new file mode 100644 index 0000000000..4d2af8b24d --- /dev/null +++ b/typedapi/types/readonlyurlrepositorysettings.go @@ -0,0 +1,149 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// ReadOnlyUrlRepositorySettings type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L110-L115 +type ReadOnlyUrlRepositorySettings struct { + ChunkSize ByteSize `json:"chunk_size,omitempty"` + Compress *bool `json:"compress,omitempty"` + HttpMaxRetries *int `json:"http_max_retries,omitempty"` + HttpSocketTimeout Duration `json:"http_socket_timeout,omitempty"` + MaxNumberOfSnapshots *int `json:"max_number_of_snapshots,omitempty"` + MaxRestoreBytesPerSec ByteSize `json:"max_restore_bytes_per_sec,omitempty"` + MaxSnapshotBytesPerSec ByteSize `json:"max_snapshot_bytes_per_sec,omitempty"` + Url string `json:"url"` +} + +func (s *ReadOnlyUrlRepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "chunk_size": + if err := dec.Decode(&s.ChunkSize); err != nil { + return fmt.Errorf("%s | %w", "ChunkSize", err) + } + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Compress", err) + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "http_max_retries": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "HttpMaxRetries", err) + } + s.HttpMaxRetries = &value + case float64: + f := int(v) + s.HttpMaxRetries = &f + } + + case "http_socket_timeout": + if err := dec.Decode(&s.HttpSocketTimeout); err != nil { + return fmt.Errorf("%s | %w", "HttpSocketTimeout", err) + } + + case "max_number_of_snapshots": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "MaxNumberOfSnapshots", err) + } + s.MaxNumberOfSnapshots = &value + case float64: + f := int(v) + s.MaxNumberOfSnapshots = &f + } + + case "max_restore_bytes_per_sec": + if err := dec.Decode(&s.MaxRestoreBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxRestoreBytesPerSec", err) + } + + case "max_snapshot_bytes_per_sec": + if err := dec.Decode(&s.MaxSnapshotBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxSnapshotBytesPerSec", err) + } + + case "url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Url", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Url = o + + } + } + return nil +} + +// NewReadOnlyUrlRepositorySettings returns a ReadOnlyUrlRepositorySettings. +func NewReadOnlyUrlRepositorySettings() *ReadOnlyUrlRepositorySettings { + r := &ReadOnlyUrlRepositorySettings{} + + return r +} diff --git a/typedapi/types/realmcache.go b/typedapi/types/realmcache.go index 2b39f366c5..25006cea1b 100644 --- a/typedapi/types/realmcache.go +++ b/typedapi/types/realmcache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
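The two new files above add typed support for read-only URL snapshot repositories. A brief sketch of building and serialising one; the endpoint is made up, and the `MarshalJSON` override means `"type":"url"` is emitted even if the field is left unset:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	compress := true
	repo := types.NewReadOnlyUrlRepository()
	repo.Settings = types.ReadOnlyUrlRepositorySettings{
		Url:      "https://snapshots.example.org/backups", // hypothetical read-only endpoint
		Compress: &compress,
	}

	body, err := json.Marshal(repo)
	if err != nil {
		panic(err)
	}
	// Prints something like:
	// {"settings":{"compress":true,"url":"https://snapshots.example.org/backups"},"type":"url"}
	fmt.Println(string(body))
}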
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RealmCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L266-L268 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L266-L268 type RealmCache struct { Size int64 `json:"size"` } @@ -57,7 +58,7 @@ func (s *RealmCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = value case float64: diff --git a/typedapi/types/realminfo.go b/typedapi/types/realminfo.go index 4f932bb3c4..5bd7a70f51 100644 --- a/typedapi/types/realminfo.go +++ b/typedapi/types/realminfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RealmInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RealmInfo.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RealmInfo.ts#L22-L25 type RealmInfo struct { Name string `json:"name"` Type string `json:"type"` @@ -53,13 +54,13 @@ func (s *RealmInfo) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/recording.go b/typedapi/types/recording.go index 26f27fd106..ac4438bc1a 100644 --- a/typedapi/types/recording.go +++ b/typedapi/types/recording.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Recording type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L225-L230 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L225-L230 type Recording struct { CumulativeExecutionCount *int64 `json:"cumulative_execution_count,omitempty"` CumulativeExecutionTime Duration `json:"cumulative_execution_time,omitempty"` @@ -60,7 +61,7 @@ func (s *Recording) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CumulativeExecutionCount", err) } s.CumulativeExecutionCount = &value case float64: @@ -70,18 +71,18 @@ func (s *Recording) UnmarshalJSON(data []byte) error { case "cumulative_execution_time": if err := dec.Decode(&s.CumulativeExecutionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CumulativeExecutionTime", err) } case "cumulative_execution_time_millis": if err := dec.Decode(&s.CumulativeExecutionTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "CumulativeExecutionTimeMillis", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/recoverybytes.go b/typedapi/types/recoverybytes.go index d7cdc3f103..ee3e5db86f 100644 --- a/typedapi/types/recoverybytes.go +++ b/typedapi/types/recoverybytes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RecoveryBytes type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L38-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L38-L48 type RecoveryBytes struct { Percent Percentage `json:"percent"` Recovered ByteSize `json:"recovered,omitempty"` @@ -59,47 +60,47 @@ func (s *RecoveryBytes) UnmarshalJSON(data []byte) error { case "percent": if err := dec.Decode(&s.Percent); err != nil { - return err + return fmt.Errorf("%s | %w", "Percent", err) } case "recovered": if err := dec.Decode(&s.Recovered); err != nil { - return err + return fmt.Errorf("%s | %w", "Recovered", err) } case "recovered_from_snapshot": if err := dec.Decode(&s.RecoveredFromSnapshot); err != nil { - return err + return fmt.Errorf("%s | %w", "RecoveredFromSnapshot", err) } case "recovered_from_snapshot_in_bytes": if err := dec.Decode(&s.RecoveredFromSnapshotInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "RecoveredFromSnapshotInBytes", err) } case "recovered_in_bytes": if err := dec.Decode(&s.RecoveredInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "RecoveredInBytes", err) } case "reused": if err := dec.Decode(&s.Reused); err != nil { - return err + return fmt.Errorf("%s | %w", "Reused", err) } case "reused_in_bytes": if err := dec.Decode(&s.ReusedInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "ReusedInBytes", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } case "total_in_bytes": if err := dec.Decode(&s.TotalInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalInBytes", err) } } diff --git a/typedapi/types/recoveryfiles.go b/typedapi/types/recoveryfiles.go index 60276ed932..2fc9c4815b 100644 --- a/typedapi/types/recoveryfiles.go +++ b/typedapi/types/recoveryfiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RecoveryFiles type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L56-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L56-L62 type RecoveryFiles struct { Details []FileDetails `json:"details,omitempty"` Percent Percentage `json:"percent"` @@ -56,12 +57,12 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "percent": if err := dec.Decode(&s.Percent); err != nil { - return err + return fmt.Errorf("%s | %w", "Percent", err) } case "recovered": @@ -71,7 +72,7 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Recovered", err) } s.Recovered = value case float64: @@ -86,7 +87,7 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Reused", err) } s.Reused = value case float64: @@ -101,7 +102,7 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/recoveryindexstatus.go b/typedapi/types/recoveryindexstatus.go index f63402234d..5517f80c04 100644 --- a/typedapi/types/recoveryindexstatus.go +++ b/typedapi/types/recoveryindexstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RecoveryIndexStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L64-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L64-L74 type RecoveryIndexStatus struct { Bytes *RecoveryBytes `json:"bytes,omitempty"` Files RecoveryFiles `json:"files"` @@ -59,47 +60,47 @@ func (s *RecoveryIndexStatus) UnmarshalJSON(data []byte) error { case "bytes": if err := dec.Decode(&s.Bytes); err != nil { - return err + return fmt.Errorf("%s | %w", "Bytes", err) } case "files": if err := dec.Decode(&s.Files); err != nil { - return err + return fmt.Errorf("%s | %w", "Files", err) } case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "source_throttle_time": if err := dec.Decode(&s.SourceThrottleTime); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceThrottleTime", err) } case "source_throttle_time_in_millis": if err := dec.Decode(&s.SourceThrottleTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceThrottleTimeInMillis", err) } case "target_throttle_time": if err := dec.Decode(&s.TargetThrottleTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetThrottleTime", err) } case "target_throttle_time_in_millis": if err := dec.Decode(&s.TargetThrottleTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetThrottleTimeInMillis", err) } case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/recoveryorigin.go b/typedapi/types/recoveryorigin.go index 0c060bac38..6cf7667d97 100644 --- a/typedapi/types/recoveryorigin.go +++ b/typedapi/types/recoveryorigin.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RecoveryOrigin type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L76-L89 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L76-L89 type RecoveryOrigin struct { BootstrapNewHistoryUuid *bool `json:"bootstrap_new_history_uuid,omitempty"` Host *string `json:"host,omitempty"` @@ -68,7 +69,7 @@ func (s *RecoveryOrigin) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "BootstrapNewHistoryUuid", err) } s.BootstrapNewHistoryUuid = &value case bool: @@ -77,13 +78,13 @@ func (s *RecoveryOrigin) UnmarshalJSON(data []byte) error { case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "hostname": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Hostname", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,47 +95,47 @@ func (s *RecoveryOrigin) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "repository": if err := dec.Decode(&s.Repository); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } case "restoreUUID": if err := dec.Decode(&s.RestoreUUID); err != nil { - return err + return fmt.Errorf("%s | %w", "RestoreUUID", err) } case "snapshot": if err := dec.Decode(&s.Snapshot); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshot", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/recoveryrecord.go b/typedapi/types/recoveryrecord.go index 0a98656155..a1f6834286 100644 --- a/typedapi/types/recoveryrecord.go +++ b/typedapi/types/recoveryrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RecoveryRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/recovery/types.ts#L24-L155 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/recovery/types.ts#L24-L155 type RecoveryRecord struct { // Bytes The number of bytes to recover. 
Bytes *string `json:"bytes,omitempty"` @@ -104,7 +105,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "bytes", "b": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Bytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,13 +116,13 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "bytes_percent", "bp": if err := dec.Decode(&s.BytesPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "BytesPercent", err) } case "bytes_recovered", "br": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BytesRecovered", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -133,7 +134,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "bytes_total", "tb": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BytesTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "files", "f": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Files", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -156,13 +157,13 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "files_percent", "fp": if err := dec.Decode(&s.FilesPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "FilesPercent", err) } case "files_recovered", "fr": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FilesRecovered", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -174,7 +175,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "files_total", "tf": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FilesTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -185,13 +186,13 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "index", "i", "idx": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "repository", "rep": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -203,7 +204,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "shard", "s", "sh": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -215,7 +216,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "snapshot", "snap": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshot", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -227,7 +228,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "source_host", "shost": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceHost", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -239,7 +240,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "source_node", "snode": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceNode", err) } o := string(tmp[:]) 
o, err = strconv.Unquote(o) @@ -251,7 +252,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "stage", "st": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Stage", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -262,28 +263,28 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "start_time", "start": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } case "start_time_millis", "start_millis": if err := dec.Decode(&s.StartTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeMillis", err) } case "stop_time", "stop": if err := dec.Decode(&s.StopTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StopTime", err) } case "stop_time_millis", "stop_millis": if err := dec.Decode(&s.StopTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StopTimeMillis", err) } case "target_host", "thost": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetHost", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -295,7 +296,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "target_node", "tnode": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetNode", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -306,13 +307,13 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "time", "t", "ti": if err := dec.Decode(&s.Time); err != nil { - return err + return fmt.Errorf("%s | %w", "Time", err) } case "translog_ops", "to": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TranslogOps", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -323,13 +324,13 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "translog_ops_percent", "top": if err := dec.Decode(&s.TranslogOpsPercent); err != nil { - return err + return fmt.Errorf("%s | %w", "TranslogOpsPercent", err) } case "translog_ops_recovered", "tor": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TranslogOpsRecovered", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -341,7 +342,7 @@ func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { case "type", "ty": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/recoverystartstatus.go b/typedapi/types/recoverystartstatus.go index 6de321a419..719b26d013 100644 --- a/typedapi/types/recoverystartstatus.go +++ b/typedapi/types/recoverystartstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RecoveryStartStatus type. 
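`RecoveryRecord` accepts both the long `_cat/recovery` column names and their abbreviations (`"bytes"`/`"b"`, `"index"`/`"i"`/`"idx"`, and so on), so responses requested with short headers decode into the same fields. A tiny sketch with a fabricated cell value, import path assumed as before:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The same column, once under its full name and once under its cat alias.
	for _, doc := range []string{`{"bytes":"1024"}`, `{"b":"1024"}`} {
		var rec types.RecoveryRecord
		if err := json.Unmarshal([]byte(doc), &rec); err != nil {
			panic(err)
		}
		fmt.Println(*rec.Bytes) // 1024 in both cases
	}
}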
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L91-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L91-L96 type RecoveryStartStatus struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` @@ -54,22 +55,22 @@ func (s *RecoveryStartStatus) UnmarshalJSON(data []byte) error { case "check_index_time": if err := dec.Decode(&s.CheckIndexTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckIndexTime", err) } case "check_index_time_in_millis": if err := dec.Decode(&s.CheckIndexTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckIndexTimeInMillis", err) } case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/recoverystats.go b/typedapi/types/recoverystats.go index 61e687b8e2..eb771efd86 100644 --- a/typedapi/types/recoverystats.go +++ b/typedapi/types/recoverystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RecoveryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L228-L233 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L228-L233 type RecoveryStats struct { CurrentAsSource int64 `json:"current_as_source"` CurrentAsTarget int64 `json:"current_as_target"` @@ -60,7 +61,7 @@ func (s *RecoveryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentAsSource", err) } s.CurrentAsSource = value case float64: @@ -75,7 +76,7 @@ func (s *RecoveryStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentAsTarget", err) } s.CurrentAsTarget = value case float64: @@ -85,12 +86,12 @@ func (s *RecoveryStats) UnmarshalJSON(data []byte) error { case "throttle_time": if err := dec.Decode(&s.ThrottleTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottleTime", err) } case "throttle_time_in_millis": if err := dec.Decode(&s.ThrottleTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottleTimeInMillis", err) } } diff --git a/typedapi/types/recoverystatus.go b/typedapi/types/recoverystatus.go index 63072cd102..7d9c39bcf2 100644 --- a/typedapi/types/recoverystatus.go +++ b/typedapi/types/recoverystatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RecoveryStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L98-L100 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L98-L100 type RecoveryStatus struct { Shards []ShardRecovery `json:"shards"` } diff --git a/typedapi/types/refreshstats.go b/typedapi/types/refreshstats.go index dd7e78848d..0c460458cf 100644 --- a/typedapi/types/refreshstats.go +++ b/typedapi/types/refreshstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RefreshStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L235-L242 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L235-L242 type RefreshStats struct { ExternalTotal int64 `json:"external_total"` ExternalTotalTimeInMillis int64 `json:"external_total_time_in_millis"` @@ -62,7 +63,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExternalTotal", err) } s.ExternalTotal = value case float64: @@ -72,7 +73,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { case "external_total_time_in_millis": if err := dec.Decode(&s.ExternalTotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ExternalTotalTimeInMillis", err) } case "listeners": @@ -82,7 +83,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Listeners", err) } s.Listeners = value case float64: @@ -97,7 +98,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -107,12 +108,12 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/regexoptions.go b/typedapi/types/regexoptions.go index e2502a5367..4cd4972d39 100644 --- a/typedapi/types/regexoptions.go +++ b/typedapi/types/regexoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RegexOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L180-L191 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L180-L191 type RegexOptions struct { // Flags Optional operators for the regular expression. Flags string `json:"flags,omitempty"` @@ -56,7 +57,7 @@ func (s *RegexOptions) UnmarshalJSON(data []byte) error { case "flags": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *RegexOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDeterminizedStates", err) } s.MaxDeterminizedStates = &value case float64: diff --git a/typedapi/types/regexpquery.go b/typedapi/types/regexpquery.go index a9f2f9c7b0..71250e29ea 100644 --- a/typedapi/types/regexpquery.go +++ b/typedapi/types/regexpquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RegexpQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L185-L215 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L185-L215 type RegexpQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -85,7 +86,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -101,7 +102,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseInsensitive", err) } s.CaseInsensitive = &value case bool: @@ -111,7 +112,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case "flags": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDeterminizedStates", err) } s.MaxDeterminizedStates = &value case float64: @@ -139,7 +140,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -150,13 +151,13 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case "rewrite": if err := dec.Decode(&s.Rewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "Rewrite", err) } case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/regressioninferenceoptions.go b/typedapi/types/regressioninferenceoptions.go index 737726da34..92f3e92742 100644 --- a/typedapi/types/regressioninferenceoptions.go +++ b/typedapi/types/regressioninferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RegressionInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L82-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L82-L91 type RegressionInferenceOptions struct { // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. 
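Several string-valued fields in the hunks above (Flags, _name, value) are decoded the same way: into a json.RawMessage first, then unquoted with strconv.Unquote, falling back to the raw bytes when unquoting fails. A small, hypothetical stand-alone version of that step, kept outside the generated code purely for illustration:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeString reproduces the generated pattern for plain string fields:
// keep the raw token, try to unquote it, and fall back to the raw text
// (for example, a bare number) when unquoting fails.
func decodeString(raw json.RawMessage) string {
	o := string(raw)
	if unquoted, err := strconv.Unquote(o); err == nil {
		return unquoted
	}
	return o
}

func main() {
	fmt.Println(decodeString(json.RawMessage(`"case_insensitive|complement"`))) // case_insensitive|complement
	fmt.Println(decodeString(json.RawMessage(`42`)))                            // 42 (fallback: not a quoted string)
}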
NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` @@ -62,7 +63,7 @@ func (s *RegressionInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopFeatureImportanceValues", err) } s.NumTopFeatureImportanceValues = &value case float64: @@ -72,7 +73,7 @@ func (s *RegressionInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": if err := dec.Decode(&s.ResultsField); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } } diff --git a/typedapi/types/reindexdestination.go b/typedapi/types/reindexdestination.go index 52272017a9..4df030fa54 100644 --- a/typedapi/types/reindexdestination.go +++ b/typedapi/types/reindexdestination.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // ReindexDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex/types.ts#L39-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex/types.ts#L39-L64 type ReindexDestination struct { // Index The name of the data stream, index, or index alias you are copying to. Index string `json:"index"` @@ -69,18 +70,18 @@ func (s *ReindexDestination) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "op_type": if err := dec.Decode(&s.OpType); err != nil { - return err + return fmt.Errorf("%s | %w", "OpType", err) } case "pipeline": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,12 +92,12 @@ func (s *ReindexDestination) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/reindexnode.go b/typedapi/types/reindexnode.go index 41b99b3b24..32833f28fc 100644 --- a/typedapi/types/reindexnode.go +++ b/typedapi/types/reindexnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" @@ -31,7 +32,7 @@ import ( // ReindexNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex_rethrottle/types.ts#L33-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex_rethrottle/types.ts#L33-L35 type ReindexNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` @@ -62,27 +63,27 @@ func (s *ReindexNode) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "tasks": @@ -90,12 +91,12 @@ func (s *ReindexNode) UnmarshalJSON(data []byte) error { s.Tasks = make(map[string]ReindexTask, 0) } if err := dec.Decode(&s.Tasks); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/reindexsource.go b/typedapi/types/reindexsource.go index 4437a9415e..a0a1bb83c9 100644 --- a/typedapi/types/reindexsource.go +++ b/typedapi/types/reindexsource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReindexSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex/types.ts#L66-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex/types.ts#L66-L97 type ReindexSource struct { // Index The name of the data stream, index, or alias you are copying from. // Accepts a comma-separated list to reindex from multiple sources. 
@@ -74,29 +75,29 @@ func (s *ReindexSource) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = append(s.Index, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "remote": if err := dec.Decode(&s.Remote); err != nil { - return err + return fmt.Errorf("%s | %w", "Remote", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } case "size": @@ -107,7 +108,7 @@ func (s *ReindexSource) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -117,7 +118,7 @@ func (s *ReindexSource) UnmarshalJSON(data []byte) error { case "slice": if err := dec.Decode(&s.Slice); err != nil { - return err + return fmt.Errorf("%s | %w", "Slice", err) } case "sort": @@ -126,13 +127,13 @@ func (s *ReindexSource) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } @@ -142,13 +143,13 @@ func (s *ReindexSource) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceFields_", err) } s.SourceFields_ = append(s.SourceFields_, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.SourceFields_); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceFields_", err) } } diff --git a/typedapi/types/reindexstatus.go b/typedapi/types/reindexstatus.go index 80783605ae..3bc23dd780 100644 --- a/typedapi/types/reindexstatus.go +++ b/typedapi/types/reindexstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReindexStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex_rethrottle/types.ts#L37-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex_rethrottle/types.ts#L37-L85 type ReindexStatus struct { // Batches The number of scroll responses pulled back by the reindex. 
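Fields such as ReindexSource.Index, Sort, and _source above accept either a single value or an array, which is why the generated code sniffs the raw message for a leading "[" before choosing how to decode. A self-contained sketch of that single-or-list handling, with the "Index" label and the helper name being illustrative only:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// decodeStringOrList mirrors the generated pattern: the JSON value may be a
// single string or an array of strings, so the raw message is checked for a
// leading '[' before decoding, and failures are wrapped with the field name.
func decodeStringOrList(raw json.RawMessage) ([]string, error) {
	if !bytes.HasPrefix(raw, []byte("[")) {
		var one string
		if err := json.Unmarshal(raw, &one); err != nil {
			return nil, fmt.Errorf("%s | %w", "Index", err)
		}
		return []string{one}, nil
	}
	var many []string
	if err := json.Unmarshal(raw, &many); err != nil {
		return nil, fmt.Errorf("%s | %w", "Index", err)
	}
	return many, nil
}

func main() {
	a, _ := decodeStringOrList(json.RawMessage(`"logs-2024"`))
	b, _ := decodeStringOrList(json.RawMessage(`["logs-2024","metrics-2024"]`))
	fmt.Println(a, b) // [logs-2024] [logs-2024 metrics-2024]
}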
Batches int64 `json:"batches"` @@ -86,7 +87,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Batches", err) } s.Batches = value case float64: @@ -101,7 +102,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Created", err) } s.Created = value case float64: @@ -116,7 +117,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Deleted", err) } s.Deleted = value case float64: @@ -131,7 +132,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Noops", err) } s.Noops = value case float64: @@ -146,7 +147,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequestsPerSecond", err) } f := float32(value) s.RequestsPerSecond = f @@ -157,27 +158,27 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case "retries": if err := dec.Decode(&s.Retries); err != nil { - return err + return fmt.Errorf("%s | %w", "Retries", err) } case "throttled": if err := dec.Decode(&s.Throttled); err != nil { - return err + return fmt.Errorf("%s | %w", "Throttled", err) } case "throttled_millis": if err := dec.Decode(&s.ThrottledMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottledMillis", err) } case "throttled_until": if err := dec.Decode(&s.ThrottledUntil); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottledUntil", err) } case "throttled_until_millis": if err := dec.Decode(&s.ThrottledUntilMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottledUntilMillis", err) } case "total": @@ -187,7 +188,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -202,7 +203,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Updated", err) } s.Updated = value case float64: @@ -217,7 +218,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "VersionConflicts", err) } s.VersionConflicts = value case float64: diff --git a/typedapi/types/reindextask.go b/typedapi/types/reindextask.go index 7b679c1204..f05699758d 100644 --- a/typedapi/types/reindextask.go +++ b/typedapi/types/reindextask.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReindexTask type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex_rethrottle/types.ts#L87-L98 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex_rethrottle/types.ts#L87-L98 type ReindexTask struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -62,7 +63,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { case "action": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Cancellable", err) } s.Cancellable = value case bool: @@ -88,7 +89,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,7 +100,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { case "headers": if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "id": @@ -109,7 +110,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } s.Id = value case float64: @@ -119,28 +120,28 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "running_time_in_nanos": if err := dec.Decode(&s.RunningTimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningTimeInNanos", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/reloaddetails.go b/typedapi/types/reloaddetails.go index d32e2bce96..7716e79621 100644 --- a/typedapi/types/reloaddetails.go +++ b/typedapi/types/reloaddetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReloadDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/reload_search_analyzers/types.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/reload_search_analyzers/types.ts#L27-L31 type ReloadDetails struct { Index string `json:"index"` ReloadedAnalyzers []string `json:"reloaded_analyzers"` @@ -55,7 +56,7 @@ func (s *ReloadDetails) UnmarshalJSON(data []byte) error { case "index": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *ReloadDetails) UnmarshalJSON(data []byte) error { case "reloaded_analyzers": if err := dec.Decode(&s.ReloadedAnalyzers); err != nil { - return err + return fmt.Errorf("%s | %w", "ReloadedAnalyzers", err) } case "reloaded_node_ids": if err := dec.Decode(&s.ReloadedNodeIds); err != nil { - return err + return fmt.Errorf("%s | %w", "ReloadedNodeIds", err) } } diff --git a/typedapi/types/reloadresult.go b/typedapi/types/reloadresult.go index 536207ce24..0342d710c0 100644 --- a/typedapi/types/reloadresult.go +++ b/typedapi/types/reloadresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ReloadResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/reload_search_analyzers/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/reload_search_analyzers/types.ts#L22-L25 type ReloadResult struct { ReloadDetails []ReloadDetails `json:"reload_details"` Shards_ ShardStatistics `json:"_shards"` diff --git a/typedapi/types/relocationfailureinfo.go b/typedapi/types/relocationfailureinfo.go index 21afe96905..c85e55e337 100644 --- a/typedapi/types/relocationfailureinfo.go +++ b/typedapi/types/relocationfailureinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RelocationFailureInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Node.ts#L73-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Node.ts#L73-L75 type RelocationFailureInfo struct { FailedAttempts int `json:"failed_attempts"` } @@ -58,7 +59,7 @@ func (s *RelocationFailureInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedAttempts", err) } s.FailedAttempts = value case float64: diff --git a/typedapi/types/remotesource.go b/typedapi/types/remotesource.go index 5871e507f7..e81e3a08b8 100644 --- a/typedapi/types/remotesource.go +++ b/typedapi/types/remotesource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RemoteSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/reindex/types.ts#L99-L125 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/reindex/types.ts#L99-L125 type RemoteSource struct { // ConnectTimeout The remote connection timeout. // Defaults to 30 seconds. @@ -63,7 +64,7 @@ func (s *RemoteSource) UnmarshalJSON(data []byte) error { case "connect_timeout": if err := dec.Decode(&s.ConnectTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ConnectTimeout", err) } case "headers": @@ -71,27 +72,27 @@ func (s *RemoteSource) UnmarshalJSON(data []byte) error { s.Headers = make(map[string]string, 0) } if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "password": if err := dec.Decode(&s.Password); err != nil { - return err + return fmt.Errorf("%s | %w", "Password", err) } case "socket_timeout": if err := dec.Decode(&s.SocketTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "SocketTimeout", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/removeaction.go b/typedapi/types/removeaction.go index 8c81aea6b6..f6bb43fa2e 100644 --- a/typedapi/types/removeaction.go +++ b/typedapi/types/removeaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RemoveAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/update_aliases/types.ts#L97-L122 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/update_aliases/types.ts#L97-L122 type RemoveAction struct { // Alias Alias for the action. // Index alias names support date math. @@ -65,7 +66,7 @@ func (s *RemoveAction) UnmarshalJSON(data []byte) error { case "alias": if err := dec.Decode(&s.Alias); err != nil { - return err + return fmt.Errorf("%s | %w", "Alias", err) } case "aliases": @@ -74,19 +75,19 @@ func (s *RemoveAction) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } s.Aliases = append(s.Aliases, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "indices": @@ -95,13 +96,13 @@ func (s *RemoveAction) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } @@ -112,7 +113,7 @@ func (s *RemoveAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MustExist", err) } s.MustExist = &value case bool: diff --git a/typedapi/types/removeduplicatestokenfilter.go b/typedapi/types/removeduplicatestokenfilter.go index b99207c8e6..14c6042363 100644 --- a/typedapi/types/removeduplicatestokenfilter.go +++ b/typedapi/types/removeduplicatestokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RemoveDuplicatesTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L301-L303 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L301-L303 type RemoveDuplicatesTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *RemoveDuplicatesTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/removeindexaction.go b/typedapi/types/removeindexaction.go index 594a4f3122..99ace6c5b2 100644 --- a/typedapi/types/removeindexaction.go +++ b/typedapi/types/removeindexaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RemoveIndexAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/update_aliases/types.ts#L124-L139 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/update_aliases/types.ts#L124-L139 type RemoveIndexAction struct { // Index Data stream or index for the action. // Supports wildcards (`*`). @@ -59,7 +60,7 @@ func (s *RemoveIndexAction) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "indices": @@ -68,13 +69,13 @@ func (s *RemoveIndexAction) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } @@ -85,7 +86,7 @@ func (s *RemoveIndexAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MustExist", err) } s.MustExist = &value case bool: diff --git a/typedapi/types/removeprocessor.go b/typedapi/types/removeprocessor.go index c514152f0f..9f014700f1 100644 --- a/typedapi/types/removeprocessor.go +++ b/typedapi/types/removeprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RemoveProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L941-L951 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L941-L951 type RemoveProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -69,7 +70,7 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,20 +85,20 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } s.Field = append(s.Field, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +114,7 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -127,7 +128,7 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -136,13 +137,13 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/renameprocessor.go b/typedapi/types/renameprocessor.go index 3327c326b3..eaf2c4efe3 100644 --- a/typedapi/types/renameprocessor.go +++ b/typedapi/types/renameprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RenameProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L953-L969 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L953-L969 type RenameProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -73,7 +74,7 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,13 +85,13 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -106,7 +107,7 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -120,7 +121,7 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -129,13 +130,13 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -146,7 +147,7 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/reportingemailattachment.go b/typedapi/types/reportingemailattachment.go index a48050b9d0..45b3bf36ee 100644 --- a/typedapi/types/reportingemailattachment.go +++ b/typedapi/types/reportingemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReportingEmailAttachment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L224-L232 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L224-L232 type ReportingEmailAttachment struct { Inline *bool `json:"inline,omitempty"` Interval Duration `json:"interval,omitempty"` @@ -61,7 +62,7 @@ func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Inline", err) } s.Inline = &value case bool: @@ -70,12 +71,12 @@ func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { case "interval": if err := dec.Decode(&s.Interval); err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } case "request": if err := dec.Decode(&s.Request); err != nil { - return err + return fmt.Errorf("%s | %w", "Request", err) } case "retries": @@ -86,7 +87,7 @@ func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Retries", err) } s.Retries = &value case float64: @@ -97,7 +98,7 @@ func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { case "url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Url", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/repositoriesrecord.go b/typedapi/types/repositoriesrecord.go index c54d34a56f..0e7e9eb409 100644 --- a/typedapi/types/repositoriesrecord.go +++ b/typedapi/types/repositoriesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RepositoriesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/repositories/types.ts#L20-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/repositories/types.ts#L20-L31 type RepositoriesRecord struct { // Id The unique repository identifier. Id *string `json:"id,omitempty"` @@ -56,7 +57,7 @@ func (s *RepositoriesRecord) UnmarshalJSON(data []byte) error { case "id", "repoId": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *RepositoriesRecord) UnmarshalJSON(data []byte) error { case "type", "t": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/repository.go b/typedapi/types/repository.go index c350cf91a3..920253cb6b 100644 --- a/typedapi/types/repository.go +++ b/typedapi/types/repository.go @@ -16,72 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types -import ( - "bytes" - "encoding/json" - "errors" - "io" - "strconv" -) - -// Repository type. +// Repository holds the union for the following types: // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotRepository.ts#L23-L27 -type Repository struct { - Settings RepositorySettings `json:"settings"` - Type string `json:"type"` - Uuid *string `json:"uuid,omitempty"` -} - -func (s *Repository) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "settings": - if err := dec.Decode(&s.Settings); err != nil { - return err - } - - case "type": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return err - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Type = o - - case "uuid": - if err := dec.Decode(&s.Uuid); err != nil { - return err - } - - } - } - return nil -} - -// NewRepository returns a Repository. -func NewRepository() *Repository { - r := &Repository{} - - return r -} +// AzureRepository +// GcsRepository +// S3Repository +// SharedFileSystemRepository +// ReadOnlyUrlRepository +// SourceOnlyRepository +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L24-L34 +type Repository interface{} diff --git a/typedapi/types/repositoryintegrityindicator.go b/typedapi/types/repositoryintegrityindicator.go index f9a9896361..53c27a0dee 100644 --- a/typedapi/types/repositoryintegrityindicator.go +++ b/typedapi/types/repositoryintegrityindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // RepositoryIntegrityIndicator type. 
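With Repository redefined above as an open union (interface{}) rather than a concrete struct carrying a generic settings bag, callers that previously read Repository.Settings directly will need to branch on the concrete repository type instead. A minimal, self-contained sketch of that pattern; the local s3Repo/gcsRepo/fsRepo types are stand-ins for the generated AzureRepository, GcsRepository, S3Repository, and friends, and the pointer shape of the decoded values is an assumption, not something this diff pins down:

package main

import "fmt"

// Stand-ins for the generated repository types listed in the union comment.
// The real types live in typedapi/types; these exist only to illustrate the
// type switch a caller might write once Repository is an interface union.
type s3Repo struct{ Bucket string }
type gcsRepo struct{ Bucket string }
type fsRepo struct{ Location string }

// repository mirrors the new union: any of the concrete repository types.
type repository interface{}

func describe(r repository) string {
	switch v := r.(type) {
	case *s3Repo:
		return "s3 bucket " + v.Bucket
	case *gcsRepo:
		return "gcs bucket " + v.Bucket
	case *fsRepo:
		return "shared filesystem at " + v.Location
	default:
		return fmt.Sprintf("unhandled repository type %T", v)
	}
}

func main() {
	repos := []repository{&s3Repo{Bucket: "snapshots"}, &fsRepo{Location: "/mnt/backups"}}
	for _, r := range repos {
		fmt.Println(describe(r))
	}
}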
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L134-L138 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L134-L138 type RepositoryIntegrityIndicator struct { Details *RepositoryIntegrityIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *RepositoryIntegrityIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/repositoryintegrityindicatordetails.go b/typedapi/types/repositoryintegrityindicatordetails.go index 21077dc173..fe65fc770d 100644 --- a/typedapi/types/repositoryintegrityindicatordetails.go +++ b/typedapi/types/repositoryintegrityindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RepositoryIntegrityIndicatorDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L139-L143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L139-L143 type RepositoryIntegrityIndicatorDetails struct { Corrupted []string `json:"corrupted,omitempty"` CorruptedRepositories *int64 `json:"corrupted_repositories,omitempty"` @@ -54,7 +55,7 @@ func (s *RepositoryIntegrityIndicatorDetails) UnmarshalJSON(data []byte) error { case "corrupted": if err := dec.Decode(&s.Corrupted); err != nil { - return err + return fmt.Errorf("%s | %w", "Corrupted", err) } case "corrupted_repositories": @@ -64,7 +65,7 @@ func (s *RepositoryIntegrityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CorruptedRepositories", err) } s.CorruptedRepositories = &value case float64: @@ -79,7 +80,7 @@ func (s *RepositoryIntegrityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalRepositories", err) } s.TotalRepositories = &value case float64: diff --git a/typedapi/types/repositorylocation.go b/typedapi/types/repositorylocation.go index 91d929cf0f..c4c9162992 100644 --- a/typedapi/types/repositorylocation.go +++ b/typedapi/types/repositorylocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RepositoryLocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/RepositoryMeteringInformation.ts#L68-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/RepositoryMeteringInformation.ts#L68-L74 type RepositoryLocation struct { BasePath string `json:"base_path"` // Bucket Bucket name (GCP, S3) @@ -57,7 +58,7 @@ func (s *RepositoryLocation) UnmarshalJSON(data []byte) error { case "base_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BasePath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,7 +70,7 @@ func (s *RepositoryLocation) UnmarshalJSON(data []byte) error { case "bucket": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Bucket", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *RepositoryLocation) UnmarshalJSON(data []byte) error { case "container": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Container", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/repositorymeteringinformation.go b/typedapi/types/repositorymeteringinformation.go index c6450733e0..c745bb8070 100644 --- a/typedapi/types/repositorymeteringinformation.go +++ b/typedapi/types/repositorymeteringinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RepositoryMeteringInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/RepositoryMeteringInformation.ts#L24-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/RepositoryMeteringInformation.ts#L24-L66 type RepositoryMeteringInformation struct { // Archived A flag that tells whether or not this object has been archived. 
When a // repository is closed or updated the @@ -88,7 +89,7 @@ func (s *RepositoryMeteringInformation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Archived", err) } s.Archived = value case bool: @@ -97,38 +98,38 @@ func (s *RepositoryMeteringInformation) UnmarshalJSON(data []byte) error { case "cluster_version": if err := dec.Decode(&s.ClusterVersion); err != nil { - return err + return fmt.Errorf("%s | %w", "ClusterVersion", err) } case "repository_ephemeral_id": if err := dec.Decode(&s.RepositoryEphemeralId); err != nil { - return err + return fmt.Errorf("%s | %w", "RepositoryEphemeralId", err) } case "repository_location": if err := dec.Decode(&s.RepositoryLocation); err != nil { - return err + return fmt.Errorf("%s | %w", "RepositoryLocation", err) } case "repository_name": if err := dec.Decode(&s.RepositoryName); err != nil { - return err + return fmt.Errorf("%s | %w", "RepositoryName", err) } case "repository_started_at": if err := dec.Decode(&s.RepositoryStartedAt); err != nil { - return err + return fmt.Errorf("%s | %w", "RepositoryStartedAt", err) } case "repository_stopped_at": if err := dec.Decode(&s.RepositoryStoppedAt); err != nil { - return err + return fmt.Errorf("%s | %w", "RepositoryStoppedAt", err) } case "repository_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RepositoryType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -139,7 +140,7 @@ func (s *RepositoryMeteringInformation) UnmarshalJSON(data []byte) error { case "request_counts": if err := dec.Decode(&s.RequestCounts); err != nil { - return err + return fmt.Errorf("%s | %w", "RequestCounts", err) } } diff --git a/typedapi/types/repositorysettings.go b/typedapi/types/repositorysettings.go deleted file mode 100644 index 765973c74b..0000000000 --- a/typedapi/types/repositorysettings.go +++ /dev/null @@ -1,127 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 - -package types - -import ( - "bytes" - "encoding/json" - "errors" - "io" - "strconv" -) - -// RepositorySettings type. 
-// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotRepository.ts#L29-L38 -type RepositorySettings struct { - ChunkSize *string `json:"chunk_size,omitempty"` - Compress string `json:"compress,omitempty"` - ConcurrentStreams string `json:"concurrent_streams,omitempty"` - Location string `json:"location"` - ReadOnly string `json:"read_only,omitempty"` -} - -func (s *RepositorySettings) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "chunk_size": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return err - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.ChunkSize = &o - - case "compress": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return err - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Compress = o - - case "concurrent_streams": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return err - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.ConcurrentStreams = o - - case "location": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return err - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Location = o - - case "read_only", "readonly": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return err - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.ReadOnly = o - - } - } - return nil -} - -// NewRepositorySettings returns a RepositorySettings. -func NewRepositorySettings() *RepositorySettings { - r := &RepositorySettings{} - - return r -} diff --git a/typedapi/types/requestcachestats.go b/typedapi/types/requestcachestats.go index e5a83ce384..83d49d1cc2 100644 --- a/typedapi/types/requestcachestats.go +++ b/typedapi/types/requestcachestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RequestCacheStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L244-L250 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L244-L250 type RequestCacheStats struct { Evictions int64 `json:"evictions"` HitCount int64 `json:"hit_count"` @@ -61,7 +62,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = value case float64: @@ -76,7 +77,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HitCount", err) } s.HitCount = value case float64: @@ -87,7 +88,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { case "memory_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,7 +104,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySizeInBytes", err) } s.MemorySizeInBytes = value case float64: @@ -118,7 +119,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissCount", err) } s.MissCount = value case float64: diff --git a/typedapi/types/requestcounts.go b/typedapi/types/requestcounts.go index 09fa54515b..ba0fb7fcba 100644 --- a/typedapi/types/requestcounts.go +++ b/typedapi/types/requestcounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RequestCounts type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/RepositoryMeteringInformation.ts#L76-L103 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/RepositoryMeteringInformation.ts#L76-L103 type RequestCounts struct { // GetBlob Number of Get Blob requests (Azure) GetBlob *int64 `json:"GetBlob,omitempty"` @@ -83,7 +84,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "GetBlob", err) } s.GetBlob = &value case float64: @@ -98,7 +99,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "GetBlobProperties", err) } s.GetBlobProperties = &value case float64: @@ -113,7 +114,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "GetObject", err) } s.GetObject = &value case float64: @@ -128,7 +129,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InsertObject", err) } s.InsertObject = &value case float64: @@ -143,7 +144,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ListBlobs", err) } s.ListBlobs = &value case float64: @@ -158,7 +159,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ListObjects", err) } s.ListObjects = &value case float64: @@ -173,7 +174,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PutBlob", err) } s.PutBlob = &value case float64: @@ -188,7 +189,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PutBlock", err) } s.PutBlock = &value case float64: @@ -203,7 +204,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PutBlockList", err) } s.PutBlockList = &value case float64: @@ -218,7 +219,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PutMultipartObject", err) } s.PutMultipartObject = &value case float64: @@ -233,7 +234,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PutObject", err) } s.PutObject = &value case float64: diff --git a/typedapi/types/requestitem.go b/typedapi/types/requestitem.go index 88f8bb4b0a..9b5558e54e 100644 --- a/typedapi/types/requestitem.go +++ b/typedapi/types/requestitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // MultisearchHeader // TemplateConfig // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch_template/types.ts#L25-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch_template/types.ts#L25-L26 type RequestItem interface{} diff --git a/typedapi/types/reroutedecision.go b/typedapi/types/reroutedecision.go index 6e19faf2c2..2ee22bc9b9 100644 --- a/typedapi/types/reroutedecision.go +++ b/typedapi/types/reroutedecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RerouteDecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L86-L90 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L86-L90 type RerouteDecision struct { Decider string `json:"decider"` Decision string `json:"decision"` @@ -55,7 +56,7 @@ func (s *RerouteDecision) UnmarshalJSON(data []byte) error { case "decider": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Decider", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *RerouteDecision) UnmarshalJSON(data []byte) error { case "decision": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Decision", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *RerouteDecision) UnmarshalJSON(data []byte) error { case "explanation": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Explanation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/rerouteexplanation.go b/typedapi/types/rerouteexplanation.go index c09ac9a5d6..34c5bf8af8 100644 --- a/typedapi/types/rerouteexplanation.go +++ b/typedapi/types/rerouteexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RerouteExplanation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L92-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L92-L96 type RerouteExplanation struct { Command string `json:"command"` Decisions []RerouteDecision `json:"decisions"` @@ -55,7 +56,7 @@ func (s *RerouteExplanation) UnmarshalJSON(data []byte) error { case "command": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Command", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *RerouteExplanation) UnmarshalJSON(data []byte) error { case "decisions": if err := dec.Decode(&s.Decisions); err != nil { - return err + return fmt.Errorf("%s | %w", "Decisions", err) } case "parameters": if err := dec.Decode(&s.Parameters); err != nil { - return err + return fmt.Errorf("%s | %w", "Parameters", err) } } diff --git a/typedapi/types/rerouteparameters.go b/typedapi/types/rerouteparameters.go index 19d34b4d64..2019bbdef2 100644 --- a/typedapi/types/rerouteparameters.go +++ b/typedapi/types/rerouteparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RerouteParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/reroute/types.ts#L98-L105 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/reroute/types.ts#L98-L105 type RerouteParameters struct { AllowPrimary bool `json:"allow_primary"` FromNode *string `json:"from_node,omitempty"` @@ -62,7 +63,7 @@ func (s *RerouteParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowPrimary", err) } s.AllowPrimary = value case bool: @@ -71,17 +72,17 @@ func (s *RerouteParameters) UnmarshalJSON(data []byte) error { case "from_node": if err := dec.Decode(&s.FromNode); err != nil { - return err + return fmt.Errorf("%s | %w", "FromNode", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "shard": @@ -92,7 +93,7 @@ func (s *RerouteParameters) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: @@ -102,7 +103,7 @@ func (s *RerouteParameters) UnmarshalJSON(data []byte) error { case "to_node": if err := dec.Decode(&s.ToNode); err != nil { - return err + return fmt.Errorf("%s | %w", "ToNode", err) } } diff --git a/typedapi/types/rerouteprocessor.go b/typedapi/types/rerouteprocessor.go index 6e175cc1b7..e565680c56 100644 --- a/typedapi/types/rerouteprocessor.go +++ b/typedapi/types/rerouteprocessor.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RerouteProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L971-L999 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L971-L999 type RerouteProcessor struct { // Dataset Field references or a static value for the dataset part of the data stream // name. @@ -98,20 +99,20 @@ func (s *RerouteProcessor) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Dataset", err) } s.Dataset = append(s.Dataset, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Dataset); err != nil { - return err + return fmt.Errorf("%s | %w", "Dataset", err) } } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -123,7 +124,7 @@ func (s *RerouteProcessor) UnmarshalJSON(data []byte) error { case "destination": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Destination", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,7 +136,7 @@ func (s *RerouteProcessor) UnmarshalJSON(data []byte) error { case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -151,7 +152,7 @@ func (s *RerouteProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -164,25 +165,25 @@ func (s *RerouteProcessor) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Namespace", err) } s.Namespace = append(s.Namespace, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Namespace); err != nil { - return err + return fmt.Errorf("%s | %w", "Namespace", err) } } case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/rescore.go b/typedapi/types/rescore.go index 74dd903d68..b6698595fb 100644 --- a/typedapi/types/rescore.go +++ b/typedapi/types/rescore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
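In the RerouteProcessor decoder above, dataset and namespace accept either a single string or an array, and both shapes are folded into a slice. A small sketch of that behaviour, again assuming the v8 typedapi import path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path
)

func main() {
	// A scalar and an array decode to the same []string representation;
	// the generated UnmarshalJSON appends the scalar as a one-element slice.
	scalar := []byte(`{"dataset":"generic","namespace":"default"}`)
	array := []byte(`{"dataset":["generic"],"namespace":["default"]}`)

	var a, b types.RerouteProcessor
	if err := json.Unmarshal(scalar, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(array, &b); err != nil {
		panic(err)
	}
	fmt.Println(a.Dataset, b.Dataset)     // [generic] [generic]
	fmt.Println(a.Namespace, b.Namespace) // [default] [default]
}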
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Rescore type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/rescoring.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/rescoring.ts#L23-L26 type Rescore struct { Query RescoreQuery `json:"query"` WindowSize *int `json:"window_size,omitempty"` @@ -53,7 +54,7 @@ func (s *Rescore) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "window_size": @@ -64,7 +65,7 @@ func (s *Rescore) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "WindowSize", err) } s.WindowSize = &value case float64: diff --git a/typedapi/types/rescorequery.go b/typedapi/types/rescorequery.go index 030790fe69..292544af7d 100644 --- a/typedapi/types/rescorequery.go +++ b/typedapi/types/rescorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // RescoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/rescoring.ts#L28-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/rescoring.ts#L28-L50 type RescoreQuery struct { // Query The query to use for rescoring. // This query is only run on the Top-K results returned by the `query` and @@ -63,7 +64,7 @@ func (s *RescoreQuery) UnmarshalJSON(data []byte) error { case "rescore_query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "query_weight": @@ -73,7 +74,7 @@ func (s *RescoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueryWeight", err) } f := Float64(value) s.QueryWeight = &f @@ -89,7 +90,7 @@ func (s *RescoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RescoreQueryWeight", err) } f := Float64(value) s.RescoreQueryWeight = &f @@ -100,7 +101,7 @@ func (s *RescoreQuery) UnmarshalJSON(data []byte) error { case "score_mode": if err := dec.Decode(&s.ScoreMode); err != nil { - return err + return fmt.Errorf("%s | %w", "ScoreMode", err) } } diff --git a/typedapi/types/reservedsize.go b/typedapi/types/reservedsize.go index 91b1cea704..1f27a430cc 100644 --- a/typedapi/types/reservedsize.go +++ b/typedapi/types/reservedsize.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReservedSize type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L71-L76 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L71-L76 type ReservedSize struct { NodeId string `json:"node_id"` Path string `json:"path"` @@ -55,13 +56,13 @@ func (s *ReservedSize) UnmarshalJSON(data []byte) error { case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *ReservedSize) UnmarshalJSON(data []byte) error { case "shards": if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "total": @@ -82,7 +83,7 @@ func (s *ReservedSize) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/resolveclusterinfo.go b/typedapi/types/resolveclusterinfo.go new file mode 100644 index 0000000000..fcd4cff4e3 --- /dev/null +++ b/typedapi/types/resolveclusterinfo.go @@ -0,0 +1,137 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// ResolveClusterInfo type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/resolve_cluster/ResolveClusterResponse.ts#L28-L54 +type ResolveClusterInfo struct { + // Connected Whether the remote cluster is connected to the local (querying) cluster. + Connected bool `json:"connected"` + // Error Provides error messages that are likely to occur if you do a search with this + // index expression + // on the specified cluster (e.g., lack of security privileges to query an + // index). 
+ Error *string `json:"error,omitempty"` + // MatchingIndices Whether the index expression provided in the request matches any indices, + // aliases or data streams + // on the cluster. + MatchingIndices *bool `json:"matching_indices,omitempty"` + // SkipUnavailable The skip_unavailable setting for a remote cluster. + SkipUnavailable bool `json:"skip_unavailable"` + // Version Provides version information about the cluster. + Version *ElasticsearchVersionMinInfo `json:"version,omitempty"` +} + +func (s *ResolveClusterInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "connected": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Connected", err) + } + s.Connected = value + case bool: + s.Connected = v + } + + case "error": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Error", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Error = &o + + case "matching_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "MatchingIndices", err) + } + s.MatchingIndices = &value + case bool: + s.MatchingIndices = &v + } + + case "skip_unavailable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "SkipUnavailable", err) + } + s.SkipUnavailable = value + case bool: + s.SkipUnavailable = v + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return fmt.Errorf("%s | %w", "Version", err) + } + + } + } + return nil +} + +// NewResolveClusterInfo returns a ResolveClusterInfo. +func NewResolveClusterInfo() *ResolveClusterInfo { + r := &ResolveClusterInfo{} + + return r +} diff --git a/typedapi/types/resolveindexaliasitem.go b/typedapi/types/resolveindexaliasitem.go index 7517a742eb..20a567ab91 100644 --- a/typedapi/types/resolveindexaliasitem.go +++ b/typedapi/types/resolveindexaliasitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ResolveIndexAliasItem type. 
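The new ResolveClusterInfo type added above backs the indices resolve-cluster response; per its generated decoder, the boolean fields are accepted either as JSON booleans or as quoted strings. A minimal decoding sketch, assuming the v8 typedapi import path (the optional error and version fields are omitted for brevity):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path
)

func main() {
	// One per-cluster entry; "connected" arrives as a quoted string here and is
	// still parsed via strconv.ParseBool by the generated decoder.
	body := []byte(`{"connected":"true","matching_indices":true,"skip_unavailable":false}`)

	info := types.NewResolveClusterInfo()
	if err := json.Unmarshal(body, info); err != nil {
		panic(err)
	}
	fmt.Println(info.Connected, *info.MatchingIndices, info.SkipUnavailable) // true true false
}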
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/resolve_index/ResolveIndexResponse.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/resolve_index/ResolveIndexResponse.ts#L37-L40 type ResolveIndexAliasItem struct { Indices []string `json:"indices"` Name string `json:"name"` @@ -56,19 +57,19 @@ func (s *ResolveIndexAliasItem) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } s.Indices = append(s.Indices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/resolveindexdatastreamsitem.go b/typedapi/types/resolveindexdatastreamsitem.go index 8e9e1f0b4d..0b4defa175 100644 --- a/typedapi/types/resolveindexdatastreamsitem.go +++ b/typedapi/types/resolveindexdatastreamsitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ResolveIndexDataStreamsItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/resolve_index/ResolveIndexResponse.ts#L42-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/resolve_index/ResolveIndexResponse.ts#L42-L46 type ResolveIndexDataStreamsItem struct { BackingIndices []string `json:"backing_indices"` Name string `json:"name"` @@ -57,24 +58,24 @@ func (s *ResolveIndexDataStreamsItem) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "BackingIndices", err) } s.BackingIndices = append(s.BackingIndices, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.BackingIndices); err != nil { - return err + return fmt.Errorf("%s | %w", "BackingIndices", err) } } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "timestamp_field": if err := dec.Decode(&s.TimestampField); err != nil { - return err + return fmt.Errorf("%s | %w", "TimestampField", err) } } diff --git a/typedapi/types/resolveindexitem.go b/typedapi/types/resolveindexitem.go index 9baf6d6b0b..75ea86098d 100644 --- a/typedapi/types/resolveindexitem.go +++ b/typedapi/types/resolveindexitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ResolveIndexItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/resolve_index/ResolveIndexResponse.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/resolve_index/ResolveIndexResponse.ts#L30-L35 type ResolveIndexItem struct { Aliases []string `json:"aliases,omitempty"` Attributes []string `json:"attributes"` @@ -54,22 +55,22 @@ func (s *ResolveIndexItem) UnmarshalJSON(data []byte) error { case "aliases": if err := dec.Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } case "attributes": if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "data_stream": if err := dec.Decode(&s.DataStream); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStream", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/resourceprivileges.go b/typedapi/types/resourceprivileges.go index f46d54aa26..f6ecaa8f72 100644 --- a/typedapi/types/resourceprivileges.go +++ b/typedapi/types/resourceprivileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ResourcePrivileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/has_privileges/types.ts#L47-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/has_privileges/types.ts#L47-L47 type ResourcePrivileges map[string]Privileges diff --git a/typedapi/types/responsebody.go b/typedapi/types/responsebody.go index 19c2913524..86415bd2e3 100644 --- a/typedapi/types/responsebody.go +++ b/typedapi/types/responsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" "strings" @@ -31,7 +32,7 @@ import ( // ResponseBody type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/SearchResponse.ts#L38-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/SearchResponse.ts#L38-L54 type ResponseBody struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` @@ -89,490 +90,490 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", 
"Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := 
NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -582,7 +583,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } @@ -591,7 +592,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case "_clusters": if err := dec.Decode(&s.Clusters_); err != nil { - return err + return fmt.Errorf("%s | %w", "Clusters_", err) } case "fields": @@ -599,12 +600,12 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { s.Fields = 
make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "max_score": @@ -614,7 +615,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxScore", err) } f := Float64(value) s.MaxScore = &f @@ -630,7 +631,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumReducePhases", err) } s.NumReducePhases = &value case float64: @@ -640,22 +641,22 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case "pit_id": if err := dec.Decode(&s.PitId); err != nil { - return err + return fmt.Errorf("%s | %w", "PitId", err) } case "profile": if err := dec.Decode(&s.Profile); err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } case "_scroll_id": if err := dec.Decode(&s.ScrollId_); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollId_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "suggest": @@ -683,28 +684,28 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case "completion": o := NewCompletionSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "phrase": o := NewPhraseSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) case "term": o := NewTermSuggest() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) } @@ -714,7 +715,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggest", err) } s.Suggest[value] = append(s.Suggest[value], o) } @@ -728,7 +729,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TerminatedEarly", err) } s.TerminatedEarly = &value case bool: @@ -742,7 +743,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimedOut", err) } s.TimedOut = value case bool: @@ -756,7 +757,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = value case float64: diff --git a/typedapi/types/responseitem.go b/typedapi/types/responseitem.go index 229cc60ef0..b341cd9fd3 100644 --- a/typedapi/types/responseitem.go +++ b/typedapi/types/responseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
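The ResponseBody changes above keep the existing aggregation dispatch: each aggregation key is expected in the type#name form (typed keys), the matching concrete aggregate is instantiated, and it is stored under the bare name. A short sketch of reading one back, assuming the v8 typedapi import path and a deliberately truncated response body:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path
)

func main() {
	// Truncated search response: only the fields needed for this sketch.
	body := []byte(`{
		"took": 3,
		"timed_out": false,
		"aggregations": {"cardinality#unique_users": {"value": 42}}
	}`)

	var resp types.ResponseBody
	if err := json.Unmarshal(body, &resp); err != nil {
		panic(err)
	}

	// The decoder strips the "cardinality#" prefix, so the aggregate sits under
	// its plain name and can be asserted to the concrete aggregate type.
	if agg, ok := resp.Aggregations["unique_users"].(*types.CardinalityAggregate); ok {
		fmt.Println(agg.Value) // 42
	}
}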
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ResponseItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L37-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L37-L81 type ResponseItem struct { // Error Contains additional information about the failed operation. // The parameter is only returned for failed operations. @@ -78,7 +79,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "error": if err := dec.Decode(&s.Error); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } case "forced_refresh": @@ -88,7 +89,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ForcedRefresh", err) } s.ForcedRefresh = &value case bool: @@ -97,13 +98,13 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "get": if err := dec.Decode(&s.Get); err != nil { - return err + return fmt.Errorf("%s | %w", "Get", err) } case "_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,7 +116,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "_index": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = &value case float64: @@ -142,7 +143,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "result": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Result", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -153,12 +154,12 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "_seq_no": if err := dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } case "_shards": if err := dec.Decode(&s.Shards_); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards_", err) } case "status": @@ -169,7 +170,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } s.Status = value case float64: @@ -179,7 +180,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "_version": if err := dec.Decode(&s.Version_); err != nil { - return err + return fmt.Errorf("%s | %w", "Version_", err) } } diff --git a/typedapi/types/retention.go b/typedapi/types/retention.go index b9afe9890c..63cef3c965 100644 --- a/typedapi/types/retention.go +++ b/typedapi/types/retention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Retention type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L84-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L84-L97 type Retention struct { // ExpireAfter Time period after which a snapshot is considered expired and eligible for // deletion. SLM deletes expired snapshots based on the slm.retention_schedule. @@ -60,7 +61,7 @@ func (s *Retention) UnmarshalJSON(data []byte) error { case "expire_after": if err := dec.Decode(&s.ExpireAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "ExpireAfter", err) } case "max_count": @@ -71,7 +72,7 @@ func (s *Retention) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxCount", err) } s.MaxCount = value case float64: @@ -87,7 +88,7 @@ func (s *Retention) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinCount", err) } s.MinCount = value case float64: diff --git a/typedapi/types/retentionlease.go b/typedapi/types/retentionlease.go index ed49120287..f192555eab 100644 --- a/typedapi/types/retentionlease.go +++ b/typedapi/types/retentionlease.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RetentionLease type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L65-L67 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L65-L67 type RetentionLease struct { Period Duration `json:"period"` } @@ -51,7 +52,7 @@ func (s *RetentionLease) UnmarshalJSON(data []byte) error { case "period": if err := dec.Decode(&s.Period); err != nil { - return err + return fmt.Errorf("%s | %w", "Period", err) } } diff --git a/typedapi/types/retentionpolicy.go b/typedapi/types/retentionpolicy.go index 2ea862211f..2caf30eb18 100644 --- a/typedapi/types/retentionpolicy.go +++ b/typedapi/types/retentionpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RetentionPolicy type. 
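The slm Retention decoder above parses max_count and min_count from either numbers or numeric strings, while expire_after is decoded as a Duration. A minimal sketch, assuming the v8 typedapi import path and that Duration accepts a time-unit string such as "30d":

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types" // assumed import path
)

func main() {
	// Snapshot lifecycle retention: keep between 5 and 50 snapshots and treat a
	// snapshot as expired after 30 days. max_count is a quoted string here to
	// exercise the strconv.Atoi path shown in the diff.
	body := []byte(`{"expire_after":"30d","max_count":"50","min_count":5}`)

	var ret types.Retention
	if err := json.Unmarshal(body, &ret); err != nil {
		panic(err)
	}
	fmt.Println(ret.ExpireAfter, ret.MaxCount, ret.MinCount) // 30d 50 5
}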
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L88-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L88-L96 type RetentionPolicy struct { // Field The date field that is used to calculate the age of the document. Field string `json:"field"` @@ -56,12 +57,12 @@ func (s *RetentionPolicy) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "max_age": if err := dec.Decode(&s.MaxAge); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAge", err) } } diff --git a/typedapi/types/retentionpolicycontainer.go b/typedapi/types/retentionpolicycontainer.go index 328c0a2ad7..444fd418dd 100644 --- a/typedapi/types/retentionpolicycontainer.go +++ b/typedapi/types/retentionpolicycontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RetentionPolicyContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L80-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L80-L86 type RetentionPolicyContainer struct { // Time Specifies that the transform uses a time field to set the retention policy. Time *RetentionPolicy `json:"time,omitempty"` diff --git a/typedapi/types/retries.go b/typedapi/types/retries.go index 36c5542cd6..8b03aaaf99 100644 --- a/typedapi/types/retries.go +++ b/typedapi/types/retries.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Retries type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Retries.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Retries.ts#L22-L25 type Retries struct { Bulk int64 `json:"bulk"` Search int64 `json:"search"` @@ -58,7 +59,7 @@ func (s *Retries) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Bulk", err) } s.Bulk = value case float64: @@ -73,7 +74,7 @@ func (s *Retries) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Search", err) } s.Search = value case float64: diff --git a/typedapi/types/reversenestedaggregate.go b/typedapi/types/reversenestedaggregate.go index 66f821a3b2..445f39a4ee 100644 --- a/typedapi/types/reversenestedaggregate.go +++ b/typedapi/types/reversenestedaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // ReverseNestedAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L489-L490 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L489-L490 type ReverseNestedAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return 
err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/reversenestedaggregation.go b/typedapi/types/reversenestedaggregation.go index fb01474e3a..fc54f84940 100644 --- a/typedapi/types/reversenestedaggregation.go +++ b/typedapi/types/reversenestedaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ReverseNestedAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L719-L725 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L719-L725 type ReverseNestedAggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -57,13 +58,13 @@ func (s *ReverseNestedAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *ReverseNestedAggregation) UnmarshalJSON(data []byte) error { case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } } diff --git a/typedapi/types/reversetokenfilter.go b/typedapi/types/reversetokenfilter.go index 66e57d63eb..1620676aa6 100644 --- a/typedapi/types/reversetokenfilter.go +++ b/typedapi/types/reversetokenfilter.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ReverseTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L305-L307 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L305-L307 type ReverseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *ReverseTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/role.go b/typedapi/types/role.go index facba37479..aa07a955c9 100644 --- a/typedapi/types/role.go +++ b/typedapi/types/role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Role type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_role/types.ts#L29-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_role/types.ts#L29-L42 type Role struct { Applications []ApplicationPrivileges `json:"applications"` Cluster []string `json:"cluster"` @@ -38,7 +39,7 @@ type Role struct { Metadata Metadata `json:"metadata"` RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` RunAs []string `json:"run_as"` - TransientMetadata TransientMetadataConfig `json:"transient_metadata"` + TransientMetadata map[string]json.RawMessage `json:"transient_metadata,omitempty"` } func (s *Role) UnmarshalJSON(data []byte) error { @@ -58,12 +59,12 @@ func (s *Role) UnmarshalJSON(data []byte) error { case "applications": if err := dec.Decode(&s.Applications); err != nil { - return err + return fmt.Errorf("%s | %w", "Applications", err) } case "cluster": if err := dec.Decode(&s.Cluster); err != nil { - return err + return fmt.Errorf("%s | %w", "Cluster", err) } case "global": @@ -71,32 +72,35 @@ func (s *Role) UnmarshalJSON(data []byte) error { s.Global = make(map[string]map[string]map[string][]string, 0) } if err := dec.Decode(&s.Global); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "role_templates": if err := dec.Decode(&s.RoleTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleTemplates", err) } case "run_as": if err := 
dec.Decode(&s.RunAs); err != nil { - return err + return fmt.Errorf("%s | %w", "RunAs", err) } case "transient_metadata": + if s.TransientMetadata == nil { + s.TransientMetadata = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.TransientMetadata); err != nil { - return err + return fmt.Errorf("%s | %w", "TransientMetadata", err) } } @@ -107,7 +111,8 @@ func (s *Role) UnmarshalJSON(data []byte) error { // NewRole returns a Role. func NewRole() *Role { r := &Role{ - Global: make(map[string]map[string]map[string][]string, 0), + Global: make(map[string]map[string]map[string][]string, 0), + TransientMetadata: make(map[string]json.RawMessage, 0), } return r diff --git a/typedapi/types/roledescriptor.go b/typedapi/types/roledescriptor.go index 145c3f16e1..105c469ce8 100644 --- a/typedapi/types/roledescriptor.go +++ b/typedapi/types/roledescriptor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RoleDescriptor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleDescriptor.ts#L27-L55 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleDescriptor.ts#L28-L56 type RoleDescriptor struct { // Applications A list of application privilege entries Applications []ApplicationPrivileges `json:"applications,omitempty"` @@ -46,8 +47,8 @@ type RoleDescriptor struct { // reserved for system usage. Metadata Metadata `json:"metadata,omitempty"` // RunAs A list of users that the API keys can impersonate. 
- RunAs []string `json:"run_as,omitempty"` - TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` + RunAs []string `json:"run_as,omitempty"` + TransientMetadata map[string]json.RawMessage `json:"transient_metadata,omitempty"` } func (s *RoleDescriptor) UnmarshalJSON(data []byte) error { @@ -67,12 +68,12 @@ func (s *RoleDescriptor) UnmarshalJSON(data []byte) error { case "applications": if err := dec.Decode(&s.Applications); err != nil { - return err + return fmt.Errorf("%s | %w", "Applications", err) } case "cluster": if err := dec.Decode(&s.Cluster); err != nil { - return err + return fmt.Errorf("%s | %w", "Cluster", err) } case "global": @@ -81,34 +82,37 @@ func (s *RoleDescriptor) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewGlobalPrivilege() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } s.Global = append(s.Global, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Global); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } } case "indices", "index": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "run_as": if err := dec.Decode(&s.RunAs); err != nil { - return err + return fmt.Errorf("%s | %w", "RunAs", err) } case "transient_metadata": + if s.TransientMetadata == nil { + s.TransientMetadata = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.TransientMetadata); err != nil { - return err + return fmt.Errorf("%s | %w", "TransientMetadata", err) } } @@ -118,7 +122,9 @@ func (s *RoleDescriptor) UnmarshalJSON(data []byte) error { // NewRoleDescriptor returns a RoleDescriptor. func NewRoleDescriptor() *RoleDescriptor { - r := &RoleDescriptor{} + r := &RoleDescriptor{ + TransientMetadata: make(map[string]json.RawMessage, 0), + } return r } diff --git a/typedapi/types/roledescriptorread.go b/typedapi/types/roledescriptorread.go index 1710996485..5934f2b061 100644 --- a/typedapi/types/roledescriptorread.go +++ b/typedapi/types/roledescriptorread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RoleDescriptorRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleDescriptor.ts#L57-L85 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleDescriptor.ts#L58-L86 type RoleDescriptorRead struct { // Applications A list of application privilege entries Applications []ApplicationPrivileges `json:"applications,omitempty"` @@ -46,8 +47,8 @@ type RoleDescriptorRead struct { // reserved for system usage. Metadata Metadata `json:"metadata,omitempty"` // RunAs A list of users that the API keys can impersonate. 
- RunAs []string `json:"run_as,omitempty"` - TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` + RunAs []string `json:"run_as,omitempty"` + TransientMetadata map[string]json.RawMessage `json:"transient_metadata,omitempty"` } func (s *RoleDescriptorRead) UnmarshalJSON(data []byte) error { @@ -67,12 +68,12 @@ func (s *RoleDescriptorRead) UnmarshalJSON(data []byte) error { case "applications": if err := dec.Decode(&s.Applications); err != nil { - return err + return fmt.Errorf("%s | %w", "Applications", err) } case "cluster": if err := dec.Decode(&s.Cluster); err != nil { - return err + return fmt.Errorf("%s | %w", "Cluster", err) } case "global": @@ -81,34 +82,37 @@ func (s *RoleDescriptorRead) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewGlobalPrivilege() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } s.Global = append(s.Global, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Global); err != nil { - return err + return fmt.Errorf("%s | %w", "Global", err) } } case "indices", "index": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "run_as": if err := dec.Decode(&s.RunAs); err != nil { - return err + return fmt.Errorf("%s | %w", "RunAs", err) } case "transient_metadata": + if s.TransientMetadata == nil { + s.TransientMetadata = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.TransientMetadata); err != nil { - return err + return fmt.Errorf("%s | %w", "TransientMetadata", err) } } @@ -118,7 +122,9 @@ func (s *RoleDescriptorRead) UnmarshalJSON(data []byte) error { // NewRoleDescriptorRead returns a RoleDescriptorRead. func NewRoleDescriptorRead() *RoleDescriptorRead { - r := &RoleDescriptorRead{} + r := &RoleDescriptorRead{ + TransientMetadata: make(map[string]json.RawMessage, 0), + } return r } diff --git a/typedapi/types/roledescriptorwrapper.go b/typedapi/types/roledescriptorwrapper.go index 59cbc803a1..4513af82db 100644 --- a/typedapi/types/roledescriptorwrapper.go +++ b/typedapi/types/roledescriptorwrapper.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RoleDescriptorWrapper type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_service_accounts/types.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_service_accounts/types.ts#L22-L24 type RoleDescriptorWrapper struct { RoleDescriptor RoleDescriptorRead `json:"role_descriptor"` } diff --git a/typedapi/types/rolemappingrule.go b/typedapi/types/rolemappingrule.go index 7aca048c1b..3d3a1233a4 100644 --- a/typedapi/types/rolemappingrule.go +++ b/typedapi/types/rolemappingrule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
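
The change that is easy to miss in the three role types above (Role, RoleDescriptor, RoleDescriptorRead): transient_metadata is no longer the dedicated TransientMetadataConfig struct but a map[string]json.RawMessage, pre-allocated in the constructors. Callers that used to read TransientMetadata.Enabled now have to unmarshal the raw value themselves. A small sketch under that assumption; transientEnabled is a hypothetical helper, and "enabled" is the key the old struct modelled:

package example

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// transientEnabled reports whether the role's transient metadata carries
// "enabled": true. With the field now typed as map[string]json.RawMessage,
// each value has to be decoded explicitly.
func transientEnabled(role types.Role) (bool, error) {
	raw, ok := role.TransientMetadata["enabled"]
	if !ok {
		return false, nil
	}
	var enabled bool
	if err := json.Unmarshal(raw, &enabled); err != nil {
		return false, fmt.Errorf("transient_metadata.enabled: %w", err)
	}
	return enabled, nil
}
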
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RoleMappingRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleMappingRule.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleMappingRule.ts#L23-L34 type RoleMappingRule struct { All []RoleMappingRule `json:"all,omitempty"` Any []RoleMappingRule `json:"any,omitempty"` diff --git a/typedapi/types/roletemplate.go b/typedapi/types/roletemplate.go index 371bcedbd3..eee040eb5b 100644 --- a/typedapi/types/roletemplate.go +++ b/typedapi/types/roletemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/templateformat" @@ -31,7 +32,7 @@ import ( // RoleTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleTemplate.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleTemplate.ts#L28-L31 type RoleTemplate struct { Format *templateformat.TemplateFormat `json:"format,omitempty"` Template Script `json:"template"` @@ -54,13 +55,13 @@ func (s *RoleTemplate) UnmarshalJSON(data []byte) error { case "format": if err := dec.Decode(&s.Format); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } case "template": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -69,7 +70,7 @@ func (s *RoleTemplate) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Template", err) } switch t { @@ -78,7 +79,7 @@ func (s *RoleTemplate) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } s.Template = o @@ -86,7 +87,7 @@ func (s *RoleTemplate) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } s.Template = o diff --git a/typedapi/types/roletemplateinlinequery.go b/typedapi/types/roletemplateinlinequery.go index 3484b30e37..5217c9738b 100644 --- a/typedapi/types/roletemplateinlinequery.go +++ b/typedapi/types/roletemplateinlinequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
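
For RoleTemplate above, Template is the Script union: the generated UnmarshalJSON buffers the raw object, peeks at its first key, and decodes into either an inline script or a stored-script reference, wrapping failures as "Template | ...". On the consumer side that means a type switch over the union members; the helper below is illustrative, the member types are the generated ones:

package example

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// describeTemplate reports which member of the Script union a decoded
// RoleTemplate ended up holding.
func describeTemplate(rt types.RoleTemplate) string {
	switch t := rt.Template.(type) {
	case *types.InlineScript:
		// Selected when the object carries inline-script keys such as "source".
		return fmt.Sprintf("inline script: %s", t.Source)
	case *types.StoredScriptId:
		// Selected when the object is a stored-script reference ("id").
		return fmt.Sprintf("stored script: %s", t.Id)
	default:
		return fmt.Sprintf("unexpected template type %T", t)
	}
}
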
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // Query // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L160-L161 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L160-L161 type RoleTemplateInlineQuery interface{} diff --git a/typedapi/types/roletemplateinlinescript.go b/typedapi/types/roletemplateinlinescript.go index fad6af5905..da747eb4b5 100644 --- a/typedapi/types/roletemplateinlinescript.go +++ b/typedapi/types/roletemplateinlinescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/scriptlanguage" @@ -31,7 +32,7 @@ import ( // RoleTemplateInlineScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L153-L158 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L153-L158 type RoleTemplateInlineScript struct { Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` Options map[string]string `json:"options,omitempty"` @@ -63,7 +64,7 @@ func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error { case "lang": if err := dec.Decode(&s.Lang); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } case "options": @@ -71,7 +72,7 @@ func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error { s.Options = make(map[string]string, 0) } if err := dec.Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } case "params": @@ -79,7 +80,7 @@ func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "source": @@ -98,7 +99,7 @@ func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } } diff --git a/typedapi/types/roletemplatequery.go b/typedapi/types/roletemplatequery.go index 2bff0d6628..2fc90be5c7 100644 --- a/typedapi/types/roletemplatequery.go +++ b/typedapi/types/roletemplatequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RoleTemplateQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L141-L151 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L141-L151 type RoleTemplateQuery struct { // Template When you create a role, you can specify a query that defines the document // level security permissions. You can optionally @@ -60,7 +61,7 @@ func (s *RoleTemplateQuery) UnmarshalJSON(data []byte) error { case "template": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -69,7 +70,7 @@ func (s *RoleTemplateQuery) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Template", err) } switch t { @@ -78,7 +79,7 @@ func (s *RoleTemplateQuery) UnmarshalJSON(data []byte) error { o := NewRoleTemplateInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } s.Template = o @@ -86,7 +87,7 @@ func (s *RoleTemplateQuery) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } s.Template = o diff --git a/typedapi/types/roletemplatescript.go b/typedapi/types/roletemplatescript.go index 43881566f8..e9fc3e9e14 100644 --- a/typedapi/types/roletemplatescript.go +++ b/typedapi/types/roletemplatescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // RoleTemplateInlineScript // StoredScriptId // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L163-L164 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L163-L164 type RoleTemplateScript interface{} diff --git a/typedapi/types/rolloverconditions.go b/typedapi/types/rolloverconditions.go index d3d166f9d3..ce225330f6 100644 --- a/typedapi/types/rolloverconditions.go +++ b/typedapi/types/rolloverconditions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RolloverConditions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/rollover/types.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/rollover/types.ts#L24-L40 type RolloverConditions struct { MaxAge Duration `json:"max_age,omitempty"` MaxAgeMillis *int64 `json:"max_age_millis,omitempty"` @@ -66,12 +67,12 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case "max_age": if err := dec.Decode(&s.MaxAge); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAge", err) } case "max_age_millis": if err := dec.Decode(&s.MaxAgeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAgeMillis", err) } case "max_docs": @@ -81,7 +82,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocs", err) } s.MaxDocs = &value case float64: @@ -96,7 +97,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPrimaryShardDocs", err) } s.MaxPrimaryShardDocs = &value case float64: @@ -106,7 +107,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case "max_primary_shard_size": if err := dec.Decode(&s.MaxPrimaryShardSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPrimaryShardSize", err) } case "max_primary_shard_size_bytes": @@ -116,7 +117,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPrimaryShardSizeBytes", err) } s.MaxPrimaryShardSizeBytes = &value case float64: @@ -126,7 +127,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case "max_size": if err := dec.Decode(&s.MaxSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSize", err) } case "max_size_bytes": @@ -136,7 +137,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSizeBytes", err) } s.MaxSizeBytes = &value case float64: @@ -146,7 +147,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case "min_age": if err := dec.Decode(&s.MinAge); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAge", err) } case "min_docs": @@ -156,7 +157,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocs", err) } s.MinDocs = &value case float64: @@ -171,7 +172,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinPrimaryShardDocs", err) } s.MinPrimaryShardDocs = &value case float64: @@ -181,7 +182,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case "min_primary_shard_size": if err := dec.Decode(&s.MinPrimaryShardSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MinPrimaryShardSize", err) } case "min_primary_shard_size_bytes": @@ -191,7 +192,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 
64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinPrimaryShardSizeBytes", err) } s.MinPrimaryShardSizeBytes = &value case float64: @@ -201,7 +202,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case "min_size": if err := dec.Decode(&s.MinSize); err != nil { - return err + return fmt.Errorf("%s | %w", "MinSize", err) } case "min_size_bytes": @@ -211,7 +212,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinSizeBytes", err) } s.MinSizeBytes = &value case float64: diff --git a/typedapi/types/rollupcapabilities.go b/typedapi/types/rollupcapabilities.go index a39cef6047..341a13dc43 100644 --- a/typedapi/types/rollupcapabilities.go +++ b/typedapi/types/rollupcapabilities.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RollupCapabilities type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_caps/types.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_caps/types.ts#L25-L27 type RollupCapabilities struct { RollupJobs []RollupCapabilitySummary `json:"rollup_jobs"` } diff --git a/typedapi/types/rollupcapabilitysummary.go b/typedapi/types/rollupcapabilitysummary.go index c4051bbae6..a07b5b0bd9 100644 --- a/typedapi/types/rollupcapabilitysummary.go +++ b/typedapi/types/rollupcapabilitysummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RollupCapabilitySummary type. 
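
RolloverConditions, finished just above, uses the same lenient numeric decoding as the other generated types in this diff: size and count fields accept either a JSON number or its string form, with strconv.ParseInt handling the latter and the new error wrapping naming the field on failure. A quick illustration; the payloads and the helper are made up:

package example

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// lenientMaxDocs shows that both spellings of max_docs decode to the same
// value: numbers are taken as-is, strings go through strconv.ParseInt inside
// the generated UnmarshalJSON.
func lenientMaxDocs() {
	for _, payload := range []string{
		`{"max_docs": 5000000}`,
		`{"max_docs": "5000000"}`,
	} {
		var rc types.RolloverConditions
		if err := json.Unmarshal([]byte(payload), &rc); err != nil {
			fmt.Println("unexpected decode error:", err)
			continue
		}
		if rc.MaxDocs != nil {
			fmt.Println("max_docs =", *rc.MaxDocs)
		}
	}
}
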
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_caps/types.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_caps/types.ts#L29-L34 type RollupCapabilitySummary struct { Fields map[string][]RollupFieldSummary `json:"fields"` IndexPattern string `json:"index_pattern"` @@ -58,13 +59,13 @@ func (s *RollupCapabilitySummary) UnmarshalJSON(data []byte) error { s.Fields = make(map[string][]RollupFieldSummary, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "index_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *RollupCapabilitySummary) UnmarshalJSON(data []byte) error { case "job_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *RollupCapabilitySummary) UnmarshalJSON(data []byte) error { case "rollup_index": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RollupIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/rollupfieldsummary.go b/typedapi/types/rollupfieldsummary.go index 3f344053e8..969cf6a247 100644 --- a/typedapi/types/rollupfieldsummary.go +++ b/typedapi/types/rollupfieldsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RollupFieldSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_caps/types.ts#L36-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_caps/types.ts#L36-L40 type RollupFieldSummary struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` @@ -55,7 +56,7 @@ func (s *RollupFieldSummary) UnmarshalJSON(data []byte) error { case "agg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Agg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *RollupFieldSummary) UnmarshalJSON(data []byte) error { case "calendar_interval": if err := dec.Decode(&s.CalendarInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarInterval", err) } case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/types/rollupjob.go b/typedapi/types/rollupjob.go index 77da99b2bf..3b2daa2596 100644 --- a/typedapi/types/rollupjob.go +++ b/typedapi/types/rollupjob.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RollupJob type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_jobs/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_jobs/types.ts#L28-L32 type RollupJob struct { Config RollupJobConfiguration `json:"config"` Stats RollupJobStats `json:"stats"` diff --git a/typedapi/types/rollupjobconfiguration.go b/typedapi/types/rollupjobconfiguration.go index 719bcf5ce9..a8860a1cdd 100644 --- a/typedapi/types/rollupjobconfiguration.go +++ b/typedapi/types/rollupjobconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RollupJobConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_jobs/types.ts#L34-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_jobs/types.ts#L34-L43 type RollupJobConfiguration struct { Cron string `json:"cron"` Groups Groupings `json:"groups"` @@ -60,7 +61,7 @@ func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { case "cron": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Cron", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,18 +72,18 @@ func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { case "groups": if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,7 +94,7 @@ func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { case "metrics": if err := dec.Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } case "page_size": @@ -103,7 +104,7 @@ func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PageSize", err) } s.PageSize = value case float64: @@ -113,12 +114,12 @@ func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { case "rollup_index": if err := dec.Decode(&s.RollupIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "RollupIndex", err) } case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/types/rollupjobstats.go b/typedapi/types/rollupjobstats.go index c445c03255..284f9de53f 100644 --- a/typedapi/types/rollupjobstats.go +++ 
b/typedapi/types/rollupjobstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RollupJobStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_jobs/types.ts#L45-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_jobs/types.ts#L45-L58 type RollupJobStats struct { DocumentsProcessed int64 `json:"documents_processed"` IndexFailures int64 `json:"index_failures"` @@ -68,7 +69,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsProcessed", err) } s.DocumentsProcessed = value case float64: @@ -83,7 +84,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexFailures", err) } s.IndexFailures = value case float64: @@ -93,7 +94,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case "index_time_in_ms": if err := dec.Decode(&s.IndexTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTimeInMs", err) } case "index_total": @@ -103,7 +104,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTotal", err) } s.IndexTotal = value case float64: @@ -118,7 +119,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PagesProcessed", err) } s.PagesProcessed = value case float64: @@ -128,7 +129,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case "processing_time_in_ms": if err := dec.Decode(&s.ProcessingTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTimeInMs", err) } case "processing_total": @@ -138,7 +139,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTotal", err) } s.ProcessingTotal = value case float64: @@ -153,7 +154,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RollupsIndexed", err) } s.RollupsIndexed = value case float64: @@ -168,7 +169,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFailures", err) } s.SearchFailures = value case float64: @@ -178,7 +179,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case "search_time_in_ms": if err := dec.Decode(&s.SearchTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTimeInMs", err) } case "search_total": @@ -188,7 +189,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) 
error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTotal", err) } s.SearchTotal = value case float64: @@ -203,7 +204,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TriggerCount", err) } s.TriggerCount = value case float64: diff --git a/typedapi/types/rollupjobstatus.go b/typedapi/types/rollupjobstatus.go index 14bea4a1e6..d0ed81d100 100644 --- a/typedapi/types/rollupjobstatus.go +++ b/typedapi/types/rollupjobstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // RollupJobStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_jobs/types.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_jobs/types.ts#L60-L64 type RollupJobStatus struct { CurrentPosition map[string]json.RawMessage `json:"current_position,omitempty"` JobState indexingjobstate.IndexingJobState `json:"job_state"` @@ -59,12 +60,12 @@ func (s *RollupJobStatus) UnmarshalJSON(data []byte) error { s.CurrentPosition = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.CurrentPosition); err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentPosition", err) } case "job_state": if err := dec.Decode(&s.JobState); err != nil { - return err + return fmt.Errorf("%s | %w", "JobState", err) } case "upgraded_doc_id": @@ -74,7 +75,7 @@ func (s *RollupJobStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UpgradedDocId", err) } s.UpgradedDocId = &value case bool: diff --git a/typedapi/types/rollupjobsummary.go b/typedapi/types/rollupjobsummary.go index 2f6a4d61b7..b17cbb4fc0 100644 --- a/typedapi/types/rollupjobsummary.go +++ b/typedapi/types/rollupjobsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RollupJobSummary type. 
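
RollupJobSummary below (like RollupCapabilitySummary earlier) keeps its per-field capabilities as a map from field name to a slice of summary entries. A short consumer-side sketch, assuming a summary has already been decoded from a get_rollup_index_caps response; listRollupAggs is a hypothetical helper:

package example

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// listRollupAggs prints, for every rolled-up field, which aggregations the
// rollup job makes available on it.
func listRollupAggs(summary types.RollupJobSummary) {
	fmt.Printf("job %s rolls %s into %s\n", summary.JobId, summary.IndexPattern, summary.RollupIndex)
	for field, caps := range summary.Fields {
		for _, c := range caps {
			fmt.Printf("  %s: %s\n", field, c.Agg)
		}
	}
}
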
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_index_caps/types.ts#L28-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_index_caps/types.ts#L28-L33 type RollupJobSummary struct { Fields map[string][]RollupJobSummaryField `json:"fields"` IndexPattern string `json:"index_pattern"` @@ -58,13 +59,13 @@ func (s *RollupJobSummary) UnmarshalJSON(data []byte) error { s.Fields = make(map[string][]RollupJobSummaryField, 0) } if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "index_pattern": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPattern", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,12 +76,12 @@ func (s *RollupJobSummary) UnmarshalJSON(data []byte) error { case "job_id": if err := dec.Decode(&s.JobId); err != nil { - return err + return fmt.Errorf("%s | %w", "JobId", err) } case "rollup_index": if err := dec.Decode(&s.RollupIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "RollupIndex", err) } } diff --git a/typedapi/types/rollupjobsummaryfield.go b/typedapi/types/rollupjobsummaryfield.go index 4d5d96df1a..1649817b5b 100644 --- a/typedapi/types/rollupjobsummaryfield.go +++ b/typedapi/types/rollupjobsummaryfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RollupJobSummaryField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/get_rollup_index_caps/types.ts#L35-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/get_rollup_index_caps/types.ts#L35-L39 type RollupJobSummaryField struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` @@ -55,7 +56,7 @@ func (s *RollupJobSummaryField) UnmarshalJSON(data []byte) error { case "agg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Agg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *RollupJobSummaryField) UnmarshalJSON(data []byte) error { case "calendar_interval": if err := dec.Decode(&s.CalendarInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "CalendarInterval", err) } case "time_zone": if err := dec.Decode(&s.TimeZone); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeZone", err) } } diff --git a/typedapi/types/routingfield.go b/typedapi/types/routingfield.go index c1c7d2f56e..ea3efb3f8d 100644 --- a/typedapi/types/routingfield.go +++ b/typedapi/types/routingfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RoutingField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L50-L52 type RoutingField struct { Required bool `json:"required"` } @@ -57,7 +58,7 @@ func (s *RoutingField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Required", err) } s.Required = value case bool: diff --git a/typedapi/types/rrfrank.go b/typedapi/types/rrfrank.go index 0b4648acc4..fbb702b846 100644 --- a/typedapi/types/rrfrank.go +++ b/typedapi/types/rrfrank.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RrfRank type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Rank.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Rank.ts#L32-L37 type RrfRank struct { // RankConstant How much influence documents in individual result sets per query have over // the final ranked result set @@ -61,7 +62,7 @@ func (s *RrfRank) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RankConstant", err) } s.RankConstant = &value case float64: @@ -76,7 +77,7 @@ func (s *RrfRank) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "WindowSize", err) } s.WindowSize = &value case float64: diff --git a/typedapi/types/rulecondition.go b/typedapi/types/rulecondition.go index 4b3b625f65..61c0a96b29 100644 --- a/typedapi/types/rulecondition.go +++ b/typedapi/types/rulecondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // RuleCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Rule.ts#L52-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Rule.ts#L52-L65 type RuleCondition struct { // AppliesTo Specifies the result property to which the condition applies. 
If your // detector uses `lat_long`, `metric`, `rare`, or `freq_rare` functions, you can @@ -63,12 +64,12 @@ func (s *RuleCondition) UnmarshalJSON(data []byte) error { case "applies_to": if err := dec.Decode(&s.AppliesTo); err != nil { - return err + return fmt.Errorf("%s | %w", "AppliesTo", err) } case "operator": if err := dec.Decode(&s.Operator); err != nil { - return err + return fmt.Errorf("%s | %w", "Operator", err) } case "value": @@ -78,7 +79,7 @@ func (s *RuleCondition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } f := Float64(value) s.Value = f diff --git a/typedapi/types/rulequery.go b/typedapi/types/rulequery.go index 6230ae3673..389103be79 100644 --- a/typedapi/types/rulequery.go +++ b/typedapi/types/rulequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RuleQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L369-L373 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L369-L373 type RuleQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -66,7 +67,7 @@ func (s *RuleQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -77,18 +78,18 @@ func (s *RuleQuery) UnmarshalJSON(data []byte) error { case "match_criteria": if err := dec.Decode(&s.MatchCriteria); err != nil { - return err + return fmt.Errorf("%s | %w", "MatchCriteria", err) } case "organic": if err := dec.Decode(&s.Organic); err != nil { - return err + return fmt.Errorf("%s | %w", "Organic", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,7 +100,7 @@ func (s *RuleQuery) UnmarshalJSON(data []byte) error { case "ruleset_id": if err := dec.Decode(&s.RulesetId); err != nil { - return err + return fmt.Errorf("%s | %w", "RulesetId", err) } } diff --git a/typedapi/types/runningstatesearchinterval.go b/typedapi/types/runningstatesearchinterval.go index bf207ea0a4..0621746336 100644 --- a/typedapi/types/runningstatesearchinterval.go +++ b/typedapi/types/runningstatesearchinterval.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // RunningStateSearchInterval type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Datafeed.ts#L214-L231 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Datafeed.ts#L214-L231 type RunningStateSearchInterval struct { // End The end time. End Duration `json:"end,omitempty"` @@ -58,22 +59,22 @@ func (s *RunningStateSearchInterval) UnmarshalJSON(data []byte) error { case "end": if err := dec.Decode(&s.End); err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } case "end_ms": if err := dec.Decode(&s.EndMs); err != nil { - return err + return fmt.Errorf("%s | %w", "EndMs", err) } case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } case "start_ms": if err := dec.Decode(&s.StartMs); err != nil { - return err + return fmt.Errorf("%s | %w", "StartMs", err) } } diff --git a/typedapi/types/runtimefield.go b/typedapi/types/runtimefield.go index fa7a891607..9c4e6fecde 100644 --- a/typedapi/types/runtimefield.go +++ b/typedapi/types/runtimefield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // RuntimeField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/RuntimeFields.ts#L26-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/RuntimeFields.ts#L26-L48 type RuntimeField struct { // FetchFields For type `lookup` FetchFields []RuntimeFieldFetchFields `json:"fetch_fields,omitempty"` @@ -68,13 +69,13 @@ func (s *RuntimeField) UnmarshalJSON(data []byte) error { case "fetch_fields": if err := dec.Decode(&s.FetchFields); err != nil { - return err + return fmt.Errorf("%s | %w", "FetchFields", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,13 +86,13 @@ func (s *RuntimeField) UnmarshalJSON(data []byte) error { case "input_field": if err := dec.Decode(&s.InputField); err != nil { - return err + return fmt.Errorf("%s | %w", "InputField", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -100,7 +101,7 @@ func (s *RuntimeField) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -109,7 +110,7 @@ func (s *RuntimeField) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -117,7 +118,7 @@ func (s *RuntimeField) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -126,17 +127,17 @@ func (s *RuntimeField) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } case "target_index": if err := dec.Decode(&s.TargetIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetIndex", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/runtimefieldfetchfields.go b/typedapi/types/runtimefieldfetchfields.go index b2a8fe06e7..619fdfc024 100644 --- a/typedapi/types/runtimefieldfetchfields.go +++ b/typedapi/types/runtimefieldfetchfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RuntimeFieldFetchFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/RuntimeFields.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/RuntimeFields.ts#L50-L54 type RuntimeFieldFetchFields struct { Field string `json:"field"` Format *string `json:"format,omitempty"` @@ -62,13 +63,13 @@ func (s *RuntimeFieldFetchFields) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/runtimefields.go b/typedapi/types/runtimefields.go index de1316ddb5..32ac5df9f8 100644 --- a/typedapi/types/runtimefields.go +++ b/typedapi/types/runtimefields.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // RuntimeFields type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/RuntimeFields.ts#L24-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/RuntimeFields.ts#L24-L24 type RuntimeFields map[string]RuntimeField diff --git a/typedapi/types/runtimefieldstype.go b/typedapi/types/runtimefieldstype.go index 44b02632d8..749bc3c175 100644 --- a/typedapi/types/runtimefieldstype.go +++ b/typedapi/types/runtimefieldstype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // RuntimeFieldsType type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L279-L294 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L279-L294 type RuntimeFieldsType struct { CharsMax int64 `json:"chars_max"` CharsTotal int64 `json:"chars_total"` @@ -70,7 +71,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CharsMax", err) } s.CharsMax = value case float64: @@ -85,7 +86,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CharsTotal", err) } s.CharsTotal = value case float64: @@ -100,7 +101,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -115,7 +116,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocMax", err) } s.DocMax = value case float64: @@ -130,7 +131,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocTotal", err) } s.DocTotal = value case float64: @@ -145,7 +146,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexCount", err) } s.IndexCount = value case float64: @@ -155,7 +156,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case "lang": if err := dec.Decode(&s.Lang); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } case "lines_max": @@ -165,7 +166,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LinesMax", err) } s.LinesMax = value case float64: @@ -180,7 +181,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LinesTotal", err) } s.LinesTotal = value case float64: @@ -190,7 +191,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "scriptless_count": @@ -200,7 +201,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptlessCount", err) } s.ScriptlessCount = value case float64: @@ -215,7 +216,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err 
:= strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShadowedCount", err) } s.ShadowedCount = value case float64: @@ -230,7 +231,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SourceMax", err) } s.SourceMax = value case float64: @@ -245,7 +246,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SourceTotal", err) } s.SourceTotal = value case float64: diff --git a/typedapi/types/s3repository.go b/typedapi/types/s3repository.go new file mode 100644 index 0000000000..fb15440b84 --- /dev/null +++ b/typedapi/types/s3repository.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// S3Repository type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L50-L53 +type S3Repository struct { + Settings S3RepositorySettings `json:"settings"` + Type string `json:"type,omitempty"` + Uuid *string `json:"uuid,omitempty"` +} + +func (s *S3Repository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return fmt.Errorf("%s | %w", "Settings", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return fmt.Errorf("%s | %w", "Uuid", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s S3Repository) MarshalJSON() ([]byte, error) { + type innerS3Repository S3Repository + tmp := innerS3Repository{ + Settings: s.Settings, + Type: s.Type, + Uuid: s.Uuid, + } + + tmp.Type = "s3" + + return json.Marshal(tmp) +} + +// NewS3Repository returns a S3Repository. 
+func NewS3Repository() *S3Repository { + r := &S3Repository{} + + return r +} diff --git a/typedapi/types/s3repositorysettings.go b/typedapi/types/s3repositorysettings.go new file mode 100644 index 0000000000..f6ceb7033f --- /dev/null +++ b/typedapi/types/s3repositorysettings.go @@ -0,0 +1,197 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// S3RepositorySettings type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L93-L102 +type S3RepositorySettings struct { + BasePath *string `json:"base_path,omitempty"` + Bucket string `json:"bucket"` + BufferSize ByteSize `json:"buffer_size,omitempty"` + CannedAcl *string `json:"canned_acl,omitempty"` + ChunkSize ByteSize `json:"chunk_size,omitempty"` + Client *string `json:"client,omitempty"` + Compress *bool `json:"compress,omitempty"` + MaxRestoreBytesPerSec ByteSize `json:"max_restore_bytes_per_sec,omitempty"` + MaxSnapshotBytesPerSec ByteSize `json:"max_snapshot_bytes_per_sec,omitempty"` + Readonly *bool `json:"readonly,omitempty"` + ServerSideEncryption *bool `json:"server_side_encryption,omitempty"` + StorageClass *string `json:"storage_class,omitempty"` +} + +func (s *S3RepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "base_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "BasePath", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.BasePath = &o + + case "bucket": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Bucket", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Bucket = o + + case "buffer_size": + if err := dec.Decode(&s.BufferSize); err != nil { + return fmt.Errorf("%s | %w", "BufferSize", err) + } + + case "canned_acl": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "CannedAcl", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.CannedAcl = &o + + case "chunk_size": + if err := dec.Decode(&s.ChunkSize); err != nil { + return fmt.Errorf("%s | %w", "ChunkSize", err) + } + + case "client": 
+ var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Client", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Client = &o + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Compress", err) + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "max_restore_bytes_per_sec": + if err := dec.Decode(&s.MaxRestoreBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxRestoreBytesPerSec", err) + } + + case "max_snapshot_bytes_per_sec": + if err := dec.Decode(&s.MaxSnapshotBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxSnapshotBytesPerSec", err) + } + + case "readonly": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Readonly", err) + } + s.Readonly = &value + case bool: + s.Readonly = &v + } + + case "server_side_encryption": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "ServerSideEncryption", err) + } + s.ServerSideEncryption = &value + case bool: + s.ServerSideEncryption = &v + } + + case "storage_class": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "StorageClass", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.StorageClass = &o + + } + } + return nil +} + +// NewS3RepositorySettings returns a S3RepositorySettings. +func NewS3RepositorySettings() *S3RepositorySettings { + r := &S3RepositorySettings{} + + return r +} diff --git a/typedapi/types/samplediversity.go b/typedapi/types/samplediversity.go index 82f2b1fe4f..fcc76309ef 100644 --- a/typedapi/types/samplediversity.go +++ b/typedapi/types/samplediversity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SampleDiversity type. 
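The two new files above, s3repository.go and s3repositorysettings.go, add typed definitions for S3 snapshot repositories; the MarshalJSON override on S3Repository pins the "type" field to the literal "s3" regardless of what the caller set. A short sketch of the caller-side effect, assuming the client's usual typedapi/types import path (the bucket name is made up):

package main

import (
	"encoding/json"
	"fmt"

	// import path assumed from the go-elasticsearch v8 module layout
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	repo := types.NewS3Repository()
	repo.Settings = types.S3RepositorySettings{Bucket: "my-snapshots"}

	// MarshalJSON overwrites Type with "s3", so the serialized form always
	// carries the literal repository type even if it was never set.
	out, err := json.Marshal(repo)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"settings":{"bucket":"my-snapshots"},"type":"s3"}
}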
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/ExploreControls.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/ExploreControls.ts#L51-L54 type SampleDiversity struct { Field string `json:"field"` MaxDocsPerValue int `json:"max_docs_per_value"` @@ -53,7 +54,7 @@ func (s *SampleDiversity) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "max_docs_per_value": @@ -64,7 +65,7 @@ func (s *SampleDiversity) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocsPerValue", err) } s.MaxDocsPerValue = value case float64: diff --git a/typedapi/types/sampleraggregate.go b/typedapi/types/sampleraggregate.go index 00c6eac894..bfe77df2b5 100644 --- a/typedapi/types/sampleraggregate.go +++ b/typedapi/types/sampleraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // SamplerAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L498-L499 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L498-L499 type SamplerAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil 
{ - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/sampleraggregation.go b/typedapi/types/sampleraggregation.go index 6c87d56332..e5263df293 100644 --- a/typedapi/types/sampleraggregation.go +++ b/typedapi/types/sampleraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SamplerAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L727-L733 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L727-L733 type SamplerAggregation struct { Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` @@ -56,13 +57,13 @@ func (s *SamplerAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *SamplerAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: diff --git a/typedapi/types/scalarvalue.go b/typedapi/types/scalarvalue.go index 3ff486d032..8aeea3b4e3 100644 --- a/typedapi/types/scalarvalue.go +++ b/typedapi/types/scalarvalue.go @@ -16,7 +16,7 @@ // under the License. 
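The SamplerAggregate hunk above shows how typed sub-aggregations are routed: response keys follow the typed_keys convention of "<type>#<name>", the prefix apparently selects the concrete aggregate struct, and elems[1] (the name part) becomes the map key in s.Aggregations. The following is a simplified, self-contained sketch of that dispatch using made-up stand-in aggregate types, not the client's real ones.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Simplified stand-ins for two concrete aggregate types.
type stringTerms struct {
	Buckets []struct {
		Key      string `json:"key"`
		DocCount int64  `json:"doc_count"`
	} `json:"buckets"`
}

type maxAgg struct {
	Value float64 `json:"value"`
}

// decodeAggs mimics the "type#name" dispatch: the prefix selects the concrete
// type, the suffix is the user-facing aggregation name.
func decodeAggs(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{})
	for key, msg := range raw {
		elems := strings.SplitN(key, "#", 2)
		if len(elems) != 2 {
			continue // not a typed sub-aggregation key
		}
		switch elems[0] {
		case "sterms":
			var o stringTerms
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		case "max":
			var o maxAgg
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		}
	}
	return out, nil
}

func main() {
	payload := []byte(`{"sterms#tags":{"buckets":[{"key":"go","doc_count":3}]},"max#price":{"value":9.5}}`)
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(payload, &raw); err != nil {
		panic(err)
	}
	aggs, err := decodeAggs(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", aggs)
}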
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -28,5 +28,5 @@ package types // bool // nil // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L39-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L39-L43 type ScalarValue interface{} diff --git a/typedapi/types/scaledfloatnumberproperty.go b/typedapi/types/scaledfloatnumberproperty.go index 7a2efc5043..e7508215bf 100644 --- a/typedapi/types/scaledfloatnumberproperty.go +++ b/typedapi/types/scaledfloatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // ScaledFloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L174-L178 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L174-L178 type ScaledFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -85,7 +86,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -101,7 +102,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -114,13 +115,13 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -131,7 +132,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -140,7 +141,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -458,7 +459,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -473,7 +474,7 @@ func (s 
*ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -487,7 +488,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -499,7 +500,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -509,7 +510,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } f := Float64(value) s.NullValue = &f @@ -520,7 +521,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -837,7 +838,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScalingFactor", err) } f := Float64(value) s.ScalingFactor = &f @@ -849,7 +850,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -858,7 +859,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -867,7 +868,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -875,7 +876,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -885,7 +886,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -901,7 +902,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -915,7 +916,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -924,12 +925,12 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/schedulecontainer.go b/typedapi/types/schedulecontainer.go index 9c9b892a44..ff2ea3c5d0 100644 --- a/typedapi/types/schedulecontainer.go +++ b/typedapi/types/schedulecontainer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ScheduleContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L80-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L80-L91 type ScheduleContainer struct { Cron *string `json:"cron,omitempty"` Daily *DailySchedule `json:"daily,omitempty"` @@ -57,22 +58,22 @@ func (s *ScheduleContainer) UnmarshalJSON(data []byte) error { case "cron": if err := dec.Decode(&s.Cron); err != nil { - return err + return fmt.Errorf("%s | %w", "Cron", err) } case "daily": if err := dec.Decode(&s.Daily); err != nil { - return err + return fmt.Errorf("%s | %w", "Daily", err) } case "hourly": if err := dec.Decode(&s.Hourly); err != nil { - return err + return fmt.Errorf("%s | %w", "Hourly", err) } case "interval": if err := dec.Decode(&s.Interval); err != nil { - return err + return fmt.Errorf("%s | %w", "Interval", err) } case "monthly": @@ -81,13 +82,13 @@ func (s *ScheduleContainer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewTimeOfMonth() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Monthly", err) } s.Monthly = append(s.Monthly, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Monthly); err != nil { - return err + return fmt.Errorf("%s | %w", "Monthly", err) } } @@ -97,13 +98,13 @@ func (s *ScheduleContainer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewTimeOfWeek() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Weekly", err) } s.Weekly = append(s.Weekly, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Weekly); err != nil { - return err + return fmt.Errorf("%s | %w", "Weekly", err) } } @@ -113,13 +114,13 @@ func (s *ScheduleContainer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewTimeOfYear() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Yearly", err) } s.Yearly = append(s.Yearly, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Yearly); err != nil { - return err + return fmt.Errorf("%s | %w", "Yearly", err) } } diff --git a/typedapi/types/scheduletimeofday.go b/typedapi/types/scheduletimeofday.go index dcb0704c86..9885eae118 100644 --- a/typedapi/types/scheduletimeofday.go +++ b/typedapi/types/scheduletimeofday.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // HourAndMinute // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L98-L103 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L98-L103 type ScheduleTimeOfDay interface{} diff --git a/typedapi/types/scheduletriggerevent.go b/typedapi/types/scheduletriggerevent.go index 573fc61b4e..b1847a1124 100644 --- a/typedapi/types/scheduletriggerevent.go +++ b/typedapi/types/scheduletriggerevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ScheduleTriggerEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L93-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L93-L96 type ScheduleTriggerEvent struct { ScheduledTime DateTime `json:"scheduled_time"` TriggeredTime DateTime `json:"triggered_time,omitempty"` @@ -52,12 +53,12 @@ func (s *ScheduleTriggerEvent) UnmarshalJSON(data []byte) error { case "scheduled_time": if err := dec.Decode(&s.ScheduledTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ScheduledTime", err) } case "triggered_time": if err := dec.Decode(&s.TriggeredTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TriggeredTime", err) } } diff --git a/typedapi/types/scoresort.go b/typedapi/types/scoresort.go index 0e6f2cc250..e9a603c345 100644 --- a/typedapi/types/scoresort.go +++ b/typedapi/types/scoresort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // ScoreSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L55-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L55-L57 type ScoreSort struct { Order *sortorder.SortOrder `json:"order,omitempty"` } diff --git a/typedapi/types/script.go b/typedapi/types/script.go index ab3a6618c5..bfc691585f 100644 --- a/typedapi/types/script.go +++ b/typedapi/types/script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // InlineScript // StoredScriptId // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Scripting.ts#L88-L89 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Scripting.ts#L88-L89 type Script interface{} diff --git a/typedapi/types/scriptcache.go b/typedapi/types/scriptcache.go index bf9aeb6175..7f1da5729a 100644 --- a/typedapi/types/scriptcache.go +++ b/typedapi/types/scriptcache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L1031-L1045 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L1031-L1045 type ScriptCache struct { // CacheEvictions Total number of times the script cache has evicted old data. CacheEvictions *int64 `json:"cache_evictions,omitempty"` @@ -64,7 +65,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheEvictions", err) } s.CacheEvictions = &value case float64: @@ -79,7 +80,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CompilationLimitTriggered", err) } s.CompilationLimitTriggered = &value case float64: @@ -94,7 +95,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compilations", err) } s.Compilations = &value case float64: @@ -105,7 +106,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { case "context": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Context", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/scriptcondition.go b/typedapi/types/scriptcondition.go index fbdd955fb6..d27a16390c 100644 --- a/typedapi/types/scriptcondition.go +++ b/typedapi/types/scriptcondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L76-L84 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L76-L84 type ScriptCondition struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` @@ -56,7 +57,7 @@ func (s *ScriptCondition) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *ScriptCondition) UnmarshalJSON(data []byte) error { case "lang": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,13 +83,13 @@ func (s *ScriptCondition) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/scriptedheuristic.go b/typedapi/types/scriptedheuristic.go index b338346383..bb1570f6f4 100644 --- a/typedapi/types/scriptedheuristic.go +++ b/typedapi/types/scriptedheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ScriptedHeuristic type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L766-L768 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L766-L768 type ScriptedHeuristic struct { Script Script `json:"script"` } @@ -52,7 +53,7 @@ func (s *ScriptedHeuristic) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -61,7 +62,7 @@ func (s *ScriptedHeuristic) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -70,7 +71,7 @@ func (s *ScriptedHeuristic) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -78,7 +79,7 @@ func (s *ScriptedHeuristic) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/scriptedmetricaggregate.go b/typedapi/types/scriptedmetricaggregate.go index 3e89ce0000..0fd9fbb88f 100644 --- a/typedapi/types/scriptedmetricaggregate.go +++ b/typedapi/types/scriptedmetricaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ScriptedMetricAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L649-L652 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L649-L652 type ScriptedMetricAggregate struct { Meta Metadata `json:"meta,omitempty"` Value json.RawMessage `json:"value,omitempty"` @@ -52,12 +53,12 @@ func (s *ScriptedMetricAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/scriptedmetricaggregation.go b/typedapi/types/scriptedmetricaggregation.go index 6c94a36610..7aedeecad9 100644 --- a/typedapi/types/scriptedmetricaggregation.go +++ b/typedapi/types/scriptedmetricaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ScriptedMetricAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L254-L280 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L254-L280 type ScriptedMetricAggregation struct { // CombineScript Runs once on each shard after document collection is complete. // Allows the aggregation to consolidate the state returned from each shard. @@ -76,7 +77,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { case "combine_script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "CombineScript", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -85,7 +86,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "CombineScript", err) } switch t { @@ -94,7 +95,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CombineScript", err) } s.CombineScript = o @@ -102,7 +103,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CombineScript", err) } s.CombineScript = o @@ -111,13 +112,13 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "init_script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "InitScript", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -126,7 +127,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "InitScript", err) } switch t { @@ -135,7 +136,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "InitScript", err) } s.InitScript = o @@ -143,7 +144,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "InitScript", err) } s.InitScript = o @@ -153,7 +154,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { case "map_script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "MapScript", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -162,7 +163,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "MapScript", err) } switch t { @@ -171,7 +172,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := 
localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MapScript", err) } s.MapScript = o @@ -179,7 +180,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MapScript", err) } s.MapScript = o @@ -188,7 +189,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "params": @@ -196,13 +197,13 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "reduce_script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "ReduceScript", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -211,7 +212,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "ReduceScript", err) } switch t { @@ -220,7 +221,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "ReduceScript", err) } s.ReduceScript = o @@ -228,7 +229,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "ReduceScript", err) } s.ReduceScript = o @@ -238,7 +239,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -247,7 +248,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -256,7 +257,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -264,7 +265,7 @@ func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/scriptfield.go b/typedapi/types/scriptfield.go index 649750a572..fae7002bff 100644 --- a/typedapi/types/scriptfield.go +++ b/typedapi/types/scriptfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Scripting.ts#L91-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Scripting.ts#L91-L94 type ScriptField struct { IgnoreFailure *bool `json:"ignore_failure,omitempty"` Script Script `json:"script"` @@ -58,7 +59,7 @@ func (s *ScriptField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -68,7 +69,7 @@ func (s *ScriptField) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -77,7 +78,7 @@ func (s *ScriptField) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -86,7 +87,7 @@ func (s *ScriptField) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -94,7 +95,7 @@ func (s *ScriptField) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/scripting.go b/typedapi/types/scripting.go index ad0dd5fd97..ac6528d0bf 100644 --- a/typedapi/types/scripting.go +++ b/typedapi/types/scripting.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Scripting type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L977-L995 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L977-L995 type Scripting struct { // CacheEvictions Total number of times the script cache has evicted old data. 
CacheEvictions *int64 `json:"cache_evictions,omitempty"` @@ -66,7 +67,7 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheEvictions", err) } s.CacheEvictions = &value case float64: @@ -81,7 +82,7 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CompilationLimitTriggered", err) } s.CompilationLimitTriggered = &value case float64: @@ -96,7 +97,7 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compilations", err) } s.Compilations = &value case float64: @@ -109,12 +110,12 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { s.CompilationsHistory = make(map[string]int64, 0) } if err := dec.Decode(&s.CompilationsHistory); err != nil { - return err + return fmt.Errorf("%s | %w", "CompilationsHistory", err) } case "contexts": if err := dec.Decode(&s.Contexts); err != nil { - return err + return fmt.Errorf("%s | %w", "Contexts", err) } } diff --git a/typedapi/types/scriptprocessor.go b/typedapi/types/scriptprocessor.go index ec723937e1..46d9d0a419 100644 --- a/typedapi/types/scriptprocessor.go +++ b/typedapi/types/scriptprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1001-L1021 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1001-L1021 type ScriptProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
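The stats counters touched above (ScriptCache, Scripting) keep their lenient decoding, where a counter may arrive either as a JSON number or as a numeric string; only the failure path changes. A sketch under that assumption, using types.Scripting with made-up values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var a, b types.Scripting

	// Both encodings populate Compilations; numeric strings go through strconv.ParseInt.
	_ = json.Unmarshal([]byte(`{"compilations": 128}`), &a)
	_ = json.Unmarshal([]byte(`{"compilations": "128"}`), &b)
	fmt.Println(*a.Compilations, *b.Compilations) // 128 128

	// A non-numeric string still fails, but the error now names the field.
	err := json.Unmarshal([]byte(`{"compilations": "lots"}`), &a)
	fmt.Println(err) // e.g. Compilations | strconv.ParseInt: parsing "lots": invalid syntax
}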
@@ -74,7 +75,7 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,13 +86,13 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -117,7 +118,7 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { case "lang": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -128,7 +129,7 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "params": @@ -136,13 +137,13 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -154,7 +155,7 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/scriptquery.go b/typedapi/types/scriptquery.go index 9931ea6486..9bfc9ccdf5 100644 --- a/typedapi/types/scriptquery.go +++ b/typedapi/types/scriptquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L318-L324 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L318-L324 type ScriptQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -66,7 +67,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -78,7 +79,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,7 +91,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -99,7 +100,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -108,7 +109,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -116,7 +117,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/scriptscorefunction.go b/typedapi/types/scriptscorefunction.go index 6dd0234885..3952ac3b1b 100644 --- a/typedapi/types/scriptscorefunction.go +++ b/typedapi/types/scriptscorefunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ScriptScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/compound.ts#L120-L125 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/compound.ts#L120-L125 type ScriptScoreFunction struct { // Script A script that computes a score. 
Script Script `json:"script"` @@ -53,7 +54,7 @@ func (s *ScriptScoreFunction) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -62,7 +63,7 @@ func (s *ScriptScoreFunction) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -71,7 +72,7 @@ func (s *ScriptScoreFunction) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -79,7 +80,7 @@ func (s *ScriptScoreFunction) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/scriptscorequery.go b/typedapi/types/scriptscorequery.go index afa501914f..c8aa926def 100644 --- a/typedapi/types/scriptscorequery.go +++ b/typedapi/types/scriptscorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptScoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L326-L340 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L326-L340 type ScriptScoreQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -72,7 +73,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -88,7 +89,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinScore", err) } f := float32(value) s.MinScore = &f @@ -99,13 +100,13 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -126,7 +127,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -135,7 +136,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -143,7 +144,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/scriptsort.go b/typedapi/types/scriptsort.go index d20bba116a..1aea57b4b2 100644 --- a/typedapi/types/scriptsort.go +++ b/typedapi/types/scriptsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/scriptsorttype" @@ -33,7 +34,7 @@ import ( // ScriptSort type. 
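Because the wrapping uses the %w verb, callers can still reach the underlying strconv error with errors.As; only the message gains the field name. An illustrative sketch against types.ScriptScoreQuery, with an invented payload:

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var q types.ScriptScoreQuery
	err := json.Unmarshal([]byte(`{"min_score": "0.5x"}`), &q)
	fmt.Println(err) // e.g. MinScore | strconv.ParseFloat: parsing "0.5x": invalid syntax

	// The %w verb keeps the original error in the chain.
	var numErr *strconv.NumError
	if errors.As(err, &numErr) {
		fmt.Println("rejected literal:", numErr.Num) // rejected literal: 0.5x
	}
}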
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L68-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L68-L74 type ScriptSort struct { Mode *sortmode.SortMode `json:"mode,omitempty"` Nested *NestedSortValue `json:"nested,omitempty"` @@ -59,23 +60,23 @@ func (s *ScriptSort) UnmarshalJSON(data []byte) error { case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } case "nested": if err := dec.Decode(&s.Nested); err != nil { - return err + return fmt.Errorf("%s | %w", "Nested", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -84,7 +85,7 @@ func (s *ScriptSort) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -93,7 +94,7 @@ func (s *ScriptSort) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -101,7 +102,7 @@ func (s *ScriptSort) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -110,7 +111,7 @@ func (s *ScriptSort) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/scripttransform.go b/typedapi/types/scripttransform.go index d8b9ae52ae..4be1a34a09 100644 --- a/typedapi/types/scripttransform.go +++ b/typedapi/types/scripttransform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ScriptTransform type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Transform.ts#L36-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Transform.ts#L36-L44 type ScriptTransform struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` @@ -56,7 +57,7 @@ func (s *ScriptTransform) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *ScriptTransform) UnmarshalJSON(data []byte) error { case "lang": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,13 +83,13 @@ func (s *ScriptTransform) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/scrollids.go b/typedapi/types/scrollids.go index d54aea1509..70363459d9 100644 --- a/typedapi/types/scrollids.go +++ b/typedapi/types/scrollids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ScrollIds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L56-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L56-L56 type ScrollIds []string diff --git a/typedapi/types/searchablesnapshots.go b/typedapi/types/searchablesnapshots.go index 898af881df..1d47687ee8 100644 --- a/typedapi/types/searchablesnapshots.go +++ b/typedapi/types/searchablesnapshots.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SearchableSnapshots type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L428-L432 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L428-L432 type SearchableSnapshots struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -61,7 +62,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -75,7 +76,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -90,7 +91,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FullCopyIndicesCount", err) } s.FullCopyIndicesCount = &value case float64: @@ -106,7 +107,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesCount", err) } s.IndicesCount = value case float64: @@ -122,7 +123,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SharedCacheIndicesCount", err) } s.SharedCacheIndicesCount = &value case float64: diff --git a/typedapi/types/searchapplication.go b/typedapi/types/searchapplication.go index 963c6e2e65..f5362e86be 100644 --- a/typedapi/types/searchapplication.go +++ b/typedapi/types/searchapplication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SearchApplication type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/_types/SearchApplication.ts#L24-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/_types/SearchApplication.ts#L24-L45 type SearchApplication struct { // AnalyticsCollectionName Analytics collection associated to the Search Application. 
AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` @@ -60,27 +61,27 @@ func (s *SearchApplication) UnmarshalJSON(data []byte) error { case "analytics_collection_name": if err := dec.Decode(&s.AnalyticsCollectionName); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyticsCollectionName", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } case "updated_at_millis": if err := dec.Decode(&s.UpdatedAtMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "UpdatedAtMillis", err) } } diff --git a/typedapi/types/searchapplicationlistitem.go b/typedapi/types/searchapplicationlistitem.go index b04d39c953..358d3a8618 100644 --- a/typedapi/types/searchapplicationlistitem.go +++ b/typedapi/types/searchapplicationlistitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SearchApplicationListItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/list/SearchApplicationsListResponse.ts#L31-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/list/SearchApplicationsListResponse.ts#L31-L48 type SearchApplicationListItem struct { // AnalyticsCollectionName Analytics collection associated to the Search Application AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` @@ -58,22 +59,22 @@ func (s *SearchApplicationListItem) UnmarshalJSON(data []byte) error { case "analytics_collection_name": if err := dec.Decode(&s.AnalyticsCollectionName); err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyticsCollectionName", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "updated_at_millis": if err := dec.Decode(&s.UpdatedAtMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "UpdatedAtMillis", err) } } diff --git a/typedapi/types/searchapplicationtemplate.go b/typedapi/types/searchapplicationtemplate.go index 130479ead2..3ad8879143 100644 --- a/typedapi/types/searchapplicationtemplate.go +++ b/typedapi/types/searchapplicationtemplate.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SearchApplicationTemplate type. 
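For fields holding nested objects, such as SearchApplication's template, the same wrapping attributes a failure anywhere inside the sub-document to the offending top-level key. A sketch with a deliberately malformed, made-up payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "template" should be an object; an array cannot be decoded into
	// SearchApplicationTemplate, and the error is attributed to Template.
	var app types.SearchApplication
	err := json.Unmarshal([]byte(`{"name": "demo", "template": []}`), &app)
	fmt.Println(err) // e.g. Template | json: cannot unmarshal array into Go value of type ...
}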
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/search_application/_types/SearchApplication.ts#L47-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/search_application/_types/SearchApplication.ts#L47-L52 type SearchApplicationTemplate struct { // Script The associated mustache template. Script InlineScript `json:"script"` diff --git a/typedapi/types/searchasyoutypeproperty.go b/typedapi/types/searchasyoutypeproperty.go index afe002dac3..084983a971 100644 --- a/typedapi/types/searchasyoutypeproperty.go +++ b/typedapi/types/searchasyoutypeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // SearchAsYouTypeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L197-L207 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L198-L208 type SearchAsYouTypeProperty struct { Analyzer *string `json:"analyzer,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -74,7 +75,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,19 +90,19 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -419,7 +420,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -434,7 +435,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -443,7 +444,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "index_options": if err := dec.Decode(&s.IndexOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexOptions", err) } case "max_shingle_size": @@ -454,7 +455,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxShingleSize", err) } s.MaxShingleSize = &value case float64: @@ -467,7 +468,7 @@ func (s 
*SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "norms": @@ -477,7 +478,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Norms", err) } s.Norms = &value case bool: @@ -794,7 +795,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "search_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -806,7 +807,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "search_quote_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQuoteAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -818,7 +819,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -834,7 +835,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -843,12 +844,12 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "term_vector": if err := dec.Decode(&s.TermVector); err != nil { - return err + return fmt.Errorf("%s | %w", "TermVector", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/searchidle.go b/typedapi/types/searchidle.go index 45073ec541..13f11cb05a 100644 --- a/typedapi/types/searchidle.go +++ b/typedapi/types/searchidle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SearchIdle type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L239-L242 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L243-L246 type SearchIdle struct { After Duration `json:"after,omitempty"` } @@ -51,7 +52,7 @@ func (s *SearchIdle) UnmarshalJSON(data []byte) error { case "after": if err := dec.Decode(&s.After); err != nil { - return err + return fmt.Errorf("%s | %w", "After", err) } } diff --git a/typedapi/types/searchinput.go b/typedapi/types/searchinput.go index 701ed98595..f5b0c37f0f 100644 --- a/typedapi/types/searchinput.go +++ b/typedapi/types/searchinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
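The copy_to handling shown above is context, unchanged by this patch: it still accepts either a single string or an array and normalizes both into the CopyTo slice. A small sketch of that behavior, with invented field values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A bare string and an array both end up in the CopyTo slice.
	var a, b types.SearchAsYouTypeProperty
	_ = json.Unmarshal([]byte(`{"copy_to": "title"}`), &a)
	_ = json.Unmarshal([]byte(`{"copy_to": ["title", "body"]}`), &b)
	fmt.Println(a.CopyTo, b.CopyTo) // [title] [title body]
}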
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SearchInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L112-L116 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L112-L116 type SearchInput struct { Extract []string `json:"extract,omitempty"` Request SearchInputRequestDefinition `json:"request"` @@ -53,17 +54,17 @@ func (s *SearchInput) UnmarshalJSON(data []byte) error { case "extract": if err := dec.Decode(&s.Extract); err != nil { - return err + return fmt.Errorf("%s | %w", "Extract", err) } case "request": if err := dec.Decode(&s.Request); err != nil { - return err + return fmt.Errorf("%s | %w", "Request", err) } case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/types/searchinputrequestbody.go b/typedapi/types/searchinputrequestbody.go index ee1ad35005..1fa9c8cd8d 100644 --- a/typedapi/types/searchinputrequestbody.go +++ b/typedapi/types/searchinputrequestbody.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SearchInputRequestBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L147-L149 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L147-L149 type SearchInputRequestBody struct { Query Query `json:"query"` } diff --git a/typedapi/types/searchinputrequestdefinition.go b/typedapi/types/searchinputrequestdefinition.go index 55a96df9df..d5fb17d5a9 100644 --- a/typedapi/types/searchinputrequestdefinition.go +++ b/typedapi/types/searchinputrequestdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SearchInputRequestDefinition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L118-L125 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L118-L125 type SearchInputRequestDefinition struct { Body *SearchInputRequestBody `json:"body,omitempty"` Indices []string `json:"indices,omitempty"` @@ -59,17 +60,17 @@ func (s *SearchInputRequestDefinition) UnmarshalJSON(data []byte) error { case "body": if err := dec.Decode(&s.Body); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "indices_options": if err := dec.Decode(&s.IndicesOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndicesOptions", err) } case "rest_total_hits_as_int": @@ -79,7 +80,7 @@ func (s *SearchInputRequestDefinition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RestTotalHitsAsInt", err) } s.RestTotalHitsAsInt = &value case bool: @@ -88,12 +89,12 @@ func (s *SearchInputRequestDefinition) UnmarshalJSON(data []byte) error { case "search_type": if err := dec.Decode(&s.SearchType); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchType", err) } case "template": if err := dec.Decode(&s.Template); err != nil { - return err + return fmt.Errorf("%s | %w", "Template", err) } } diff --git a/typedapi/types/searchprofile.go b/typedapi/types/searchprofile.go index 853d8bdfc2..35c6bb0013 100644 --- a/typedapi/types/searchprofile.go +++ b/typedapi/types/searchprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SearchProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L126-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L126-L130 type SearchProfile struct { Collector []Collector `json:"collector"` Query []QueryProfile `json:"query"` @@ -54,12 +55,12 @@ func (s *SearchProfile) UnmarshalJSON(data []byte) error { case "collector": if err := dec.Decode(&s.Collector); err != nil { - return err + return fmt.Errorf("%s | %w", "Collector", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "rewrite_time": @@ -69,7 +70,7 @@ func (s *SearchProfile) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RewriteTime", err) } s.RewriteTime = value case float64: diff --git a/typedapi/types/searchstats.go b/typedapi/types/searchstats.go index d7bcfe9702..874dbacd3d 100644 --- a/typedapi/types/searchstats.go +++ b/typedapi/types/searchstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SearchStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L252-L271 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L252-L271 type SearchStats struct { FetchCurrent int64 `json:"fetch_current"` FetchTime Duration `json:"fetch_time,omitempty"` @@ -74,7 +75,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FetchCurrent", err) } s.FetchCurrent = value case float64: @@ -84,12 +85,12 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case "fetch_time": if err := dec.Decode(&s.FetchTime); err != nil { - return err + return fmt.Errorf("%s | %w", "FetchTime", err) } case "fetch_time_in_millis": if err := dec.Decode(&s.FetchTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "FetchTimeInMillis", err) } case "fetch_total": @@ -99,7 +100,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FetchTotal", err) } s.FetchTotal = value case float64: @@ -112,7 +113,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { s.Groups = make(map[string]SearchStats, 0) } if err := dec.Decode(&s.Groups); err != nil { - return err + return fmt.Errorf("%s | %w", "Groups", err) } case "open_contexts": @@ -122,7 +123,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "OpenContexts", err) } s.OpenContexts = &value case float64: @@ -137,7 +138,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCurrent", err) } s.QueryCurrent = value case float64: @@ -147,12 +148,12 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case "query_time": if err := dec.Decode(&s.QueryTime); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryTime", err) } case "query_time_in_millis": if err := dec.Decode(&s.QueryTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryTimeInMillis", err) } case "query_total": @@ -162,7 +163,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueryTotal", err) } s.QueryTotal = value case float64: @@ -177,7 +178,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollCurrent", err) } s.ScrollCurrent = value case float64: @@ -187,12 +188,12 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case "scroll_time": if err := dec.Decode(&s.ScrollTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollTime", err) } case "scroll_time_in_millis": if err := 
dec.Decode(&s.ScrollTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollTimeInMillis", err) } case "scroll_total": @@ -202,7 +203,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScrollTotal", err) } s.ScrollTotal = value case float64: @@ -217,7 +218,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestCurrent", err) } s.SuggestCurrent = value case float64: @@ -227,12 +228,12 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case "suggest_time": if err := dec.Decode(&s.SuggestTime); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTime", err) } case "suggest_time_in_millis": if err := dec.Decode(&s.SuggestTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTimeInMillis", err) } case "suggest_total": @@ -242,7 +243,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestTotal", err) } s.SuggestTotal = value case float64: diff --git a/typedapi/types/searchtemplaterequestbody.go b/typedapi/types/searchtemplaterequestbody.go index ce259de325..2c8bc3c1c9 100644 --- a/typedapi/types/searchtemplaterequestbody.go +++ b/typedapi/types/searchtemplaterequestbody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SearchTemplateRequestBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L128-L145 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L128-L145 type SearchTemplateRequestBody struct { Explain *bool `json:"explain,omitempty"` // Id ID of the search template to use. 
If no source is specified, @@ -66,7 +67,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -75,7 +76,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "params": @@ -83,7 +84,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "profile": @@ -93,7 +94,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -103,7 +104,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/searchtransform.go b/typedapi/types/searchtransform.go index 689670a8c1..3d4baddcad 100644 --- a/typedapi/types/searchtransform.go +++ b/typedapi/types/searchtransform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SearchTransform type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Transform.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Transform.ts#L46-L49 type SearchTransform struct { Request SearchInputRequestDefinition `json:"request"` Timeout Duration `json:"timeout"` @@ -52,12 +53,12 @@ func (s *SearchTransform) UnmarshalJSON(data []byte) error { case "request": if err := dec.Decode(&s.Request); err != nil { - return err + return fmt.Errorf("%s | %w", "Request", err) } case "timeout": if err := dec.Decode(&s.Timeout); err != nil { - return err + return fmt.Errorf("%s | %w", "Timeout", err) } } diff --git a/typedapi/types/security.go b/typedapi/types/security.go index c06208a1b4..de0a107a47 100644 --- a/typedapi/types/security.go +++ b/typedapi/types/security.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Security type. 
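// Because every generated UnmarshalJSON in this diff wraps with "%s | %w", field
// labels compose across nested types: a failure inside SearchTransform.Request
// (a SearchInputRequestDefinition) surfaces as "Request | <InnerField> | <cause>".
// A tiny illustrative sketch of that chaining; the low-level parse failure here is
// fabricated for the example, not taken from the Duration decoder.
package main

import (
	"errors"
	"fmt"
	"strconv"
)

func main() {
	_, cause := strconv.ParseInt("30s", 10, 64)      // illustrative root cause
	inner := fmt.Errorf("%s | %w", "Timeout", cause) // as SearchInputRequestDefinition would wrap it
	outer := fmt.Errorf("%s | %w", "Request", inner) // as SearchTransform would wrap it

	fmt.Println(outer)
	// Request | Timeout | strconv.ParseInt: parsing "30s": invalid syntax

	// The chain stays inspectable all the way down.
	fmt.Println(errors.Is(outer, strconv.ErrSyntax)) // true
}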
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L434-L447 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L434-L447 type Security struct { Anonymous FeatureToggle `json:"anonymous"` ApiKeyService FeatureToggle `json:"api_key_service"` @@ -65,17 +66,17 @@ func (s *Security) UnmarshalJSON(data []byte) error { case "anonymous": if err := dec.Decode(&s.Anonymous); err != nil { - return err + return fmt.Errorf("%s | %w", "Anonymous", err) } case "api_key_service": if err := dec.Decode(&s.ApiKeyService); err != nil { - return err + return fmt.Errorf("%s | %w", "ApiKeyService", err) } case "audit": if err := dec.Decode(&s.Audit); err != nil { - return err + return fmt.Errorf("%s | %w", "Audit", err) } case "available": @@ -85,7 +86,7 @@ func (s *Security) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -99,7 +100,7 @@ func (s *Security) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -108,17 +109,17 @@ func (s *Security) UnmarshalJSON(data []byte) error { case "fips_140": if err := dec.Decode(&s.Fips140); err != nil { - return err + return fmt.Errorf("%s | %w", "Fips140", err) } case "ipfilter": if err := dec.Decode(&s.Ipfilter); err != nil { - return err + return fmt.Errorf("%s | %w", "Ipfilter", err) } case "operator_privileges": if err := dec.Decode(&s.OperatorPrivileges); err != nil { - return err + return fmt.Errorf("%s | %w", "OperatorPrivileges", err) } case "realms": @@ -126,7 +127,7 @@ func (s *Security) UnmarshalJSON(data []byte) error { s.Realms = make(map[string]XpackRealm, 0) } if err := dec.Decode(&s.Realms); err != nil { - return err + return fmt.Errorf("%s | %w", "Realms", err) } case "role_mapping": @@ -134,27 +135,27 @@ func (s *Security) UnmarshalJSON(data []byte) error { s.RoleMapping = make(map[string]XpackRoleMapping, 0) } if err := dec.Decode(&s.RoleMapping); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleMapping", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "ssl": if err := dec.Decode(&s.Ssl); err != nil { - return err + return fmt.Errorf("%s | %w", "Ssl", err) } case "system_key": if err := dec.Decode(&s.SystemKey); err != nil { - return err + return fmt.Errorf("%s | %w", "SystemKey", err) } case "token_service": if err := dec.Decode(&s.TokenService); err != nil { - return err + return fmt.Errorf("%s | %w", "TokenService", err) } } diff --git a/typedapi/types/securityrolemapping.go b/typedapi/types/securityrolemapping.go index 68f538a526..4a700cd600 100644 --- a/typedapi/types/securityrolemapping.go +++ b/typedapi/types/securityrolemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SecurityRoleMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/RoleMapping.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/RoleMapping.ts#L25-L31 type SecurityRoleMapping struct { Enabled bool `json:"enabled"` Metadata Metadata `json:"metadata"` @@ -61,7 +62,7 @@ func (s *SecurityRoleMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -70,22 +71,22 @@ func (s *SecurityRoleMapping) UnmarshalJSON(data []byte) error { case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "role_templates": if err := dec.Decode(&s.RoleTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "RoleTemplates", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "rules": if err := dec.Decode(&s.Rules); err != nil { - return err + return fmt.Errorf("%s | %w", "Rules", err) } } diff --git a/typedapi/types/securityroles.go b/typedapi/types/securityroles.go index 0d51c02f19..68d09721e6 100644 --- a/typedapi/types/securityroles.go +++ b/typedapi/types/securityroles.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SecurityRoles type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L296-L300 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L296-L300 type SecurityRoles struct { Dls SecurityRolesDls `json:"dls"` File SecurityRolesFile `json:"file"` diff --git a/typedapi/types/securityrolesdls.go b/typedapi/types/securityrolesdls.go index 7f5892b071..9295091a42 100644 --- a/typedapi/types/securityrolesdls.go +++ b/typedapi/types/securityrolesdls.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SecurityRolesDls type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L308-L310 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L308-L310 type SecurityRolesDls struct { BitSetCache SecurityRolesDlsBitSetCache `json:"bit_set_cache"` } diff --git a/typedapi/types/securityrolesdlsbitsetcache.go b/typedapi/types/securityrolesdlsbitsetcache.go index 6db8cfb375..f7d735f7c2 100644 --- a/typedapi/types/securityrolesdlsbitsetcache.go +++ b/typedapi/types/securityrolesdlsbitsetcache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SecurityRolesDlsBitSetCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L312-L316 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L312-L316 type SecurityRolesDlsBitSetCache struct { Count int `json:"count"` Memory ByteSize `json:"memory,omitempty"` @@ -60,7 +61,7 @@ func (s *SecurityRolesDlsBitSetCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -70,12 +71,12 @@ func (s *SecurityRolesDlsBitSetCache) UnmarshalJSON(data []byte) error { case "memory": if err := dec.Decode(&s.Memory); err != nil { - return err + return fmt.Errorf("%s | %w", "Memory", err) } case "memory_in_bytes": if err := dec.Decode(&s.MemoryInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryInBytes", err) } } diff --git a/typedapi/types/securityrolesfile.go b/typedapi/types/securityrolesfile.go index 573b113ff6..82a4666cc0 100644 --- a/typedapi/types/securityrolesfile.go +++ b/typedapi/types/securityrolesfile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SecurityRolesFile type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L318-L322 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L318-L322 type SecurityRolesFile struct { Dls bool `json:"dls"` Fls bool `json:"fls"` @@ -59,7 +60,7 @@ func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Dls", err) } s.Dls = value case bool: @@ -73,7 +74,7 @@ func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Fls", err) } s.Fls = value case bool: @@ -87,7 +88,7 @@ func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = value case float64: diff --git a/typedapi/types/securityrolesnative.go b/typedapi/types/securityrolesnative.go index b5ba4dc609..893fd5dd0c 100644 --- a/typedapi/types/securityrolesnative.go +++ b/typedapi/types/securityrolesnative.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
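// Many of the numeric and boolean fields touched here (for example
// SecurityRolesDlsBitSetCache.Count and SecurityRolesFile.Dls) are decoded through
// a switch that accepts either the native JSON type or a quoted string, now with
// the field name attached on parse failure. A self-contained stand-in for that
// switch; "bitSetCache" is illustrative, not the generated type.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type bitSetCache struct {
	Count int `json:"count"`
}

func (s *bitSetCache) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	rawCount, ok := raw["count"]
	if !ok {
		return nil
	}
	var v interface{}
	if err := json.Unmarshal(rawCount, &v); err != nil {
		return err
	}
	switch v := v.(type) {
	case string: // {"count": "42"} is accepted too
		value, err := strconv.Atoi(v)
		if err != nil {
			return fmt.Errorf("%s | %w", "Count", err)
		}
		s.Count = value
	case float64: // {"count": 42}
		s.Count = int(v)
	}
	return nil
}

func main() {
	for _, doc := range []string{`{"count": 42}`, `{"count": "42"}`} {
		var c bitSetCache
		if err := json.Unmarshal([]byte(doc), &c); err != nil {
			fmt.Println(err)
			continue
		}
		fmt.Println(c.Count) // 42 for both documents
	}
}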
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SecurityRolesNative type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L302-L306 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L302-L306 type SecurityRolesNative struct { Dls bool `json:"dls"` Fls bool `json:"fls"` @@ -59,7 +60,7 @@ func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Dls", err) } s.Dls = value case bool: @@ -73,7 +74,7 @@ func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Fls", err) } s.Fls = value case bool: @@ -87,7 +88,7 @@ func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = value case float64: diff --git a/typedapi/types/segment.go b/typedapi/types/segment.go index 7f276aaa8d..fad734c550 100644 --- a/typedapi/types/segment.go +++ b/typedapi/types/segment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Segment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/segments/types.ts#L28-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/segments/types.ts#L28-L38 type Segment struct { Attributes map[string]string `json:"attributes"` Committed bool `json:"committed"` @@ -63,7 +64,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "committed": @@ -73,7 +74,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Committed", err) } s.Committed = value case bool: @@ -87,7 +88,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compound", err) } s.Compound = value case bool: @@ -101,7 +102,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeletedDocs", err) } s.DeletedDocs = value case float64: @@ -117,7 +118,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Generation", err) } s.Generation = value case float64: @@ -132,7 +133,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumDocs", err) } s.NumDocs = value case float64: @@ -147,7 +148,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Search", err) } s.Search = value case bool: @@ -161,7 +162,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } f := Float64(value) s.SizeInBytes = f @@ -172,7 +173,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/segmentsrecord.go b/typedapi/types/segmentsrecord.go index f34f7a6ca6..96ce3db851 100644 --- a/typedapi/types/segmentsrecord.go +++ b/typedapi/types/segmentsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SegmentsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/segments/types.ts#L22-L107 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/segments/types.ts#L22-L107 type SegmentsRecord struct { // Committed If `true`, the segment is synced to disk. 
// Segments that are synced can survive a hard reboot. @@ -105,7 +106,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "committed", "ic", "isCommitted": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Committed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "compound", "ico", "isCompound": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Compound", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -129,7 +130,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "docs.count", "dc", "docsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -141,7 +142,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "docs.deleted", "dd", "docsDeleted": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocsDeleted", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -153,7 +154,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "generation", "g", "gen": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Generation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -164,18 +165,18 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index", "i", "idx": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "ip": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -187,7 +188,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "prirep", "p", "pr", "primaryOrReplica": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Prirep", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -199,7 +200,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "searchable", "is", "isSearchable": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Searchable", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,7 +212,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "segment", "seg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Segment", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -223,7 +224,7 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "shard", "s", "sh": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -234,17 +235,17 @@ func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { case "size", "si": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "size.memory", "sm", "sizeMemory": if err := dec.Decode(&s.SizeMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "SizeMemory", err) } case 
"version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/segmentsstats.go b/typedapi/types/segmentsstats.go index 2ec175192f..411f76cfe6 100644 --- a/typedapi/types/segmentsstats.go +++ b/typedapi/types/segmentsstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SegmentsStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L273-L366 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L273-L366 type SegmentsStats struct { // Count Total number of segments across all shards assigned to selected nodes. Count int `json:"count"` @@ -126,7 +127,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -136,7 +137,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "doc_values_memory": if err := dec.Decode(&s.DocValuesMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "DocValuesMemory", err) } case "doc_values_memory_in_bytes": @@ -146,7 +147,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValuesMemoryInBytes", err) } s.DocValuesMemoryInBytes = value case float64: @@ -159,12 +160,12 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { s.FileSizes = make(map[string]ShardFileSizeInfo, 0) } if err := dec.Decode(&s.FileSizes); err != nil { - return err + return fmt.Errorf("%s | %w", "FileSizes", err) } case "fixed_bit_set": if err := dec.Decode(&s.FixedBitSet); err != nil { - return err + return fmt.Errorf("%s | %w", "FixedBitSet", err) } case "fixed_bit_set_memory_in_bytes": @@ -174,7 +175,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FixedBitSetMemoryInBytes", err) } s.FixedBitSetMemoryInBytes = value case float64: @@ -189,7 +190,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexWriterMaxMemoryInBytes", err) } s.IndexWriterMaxMemoryInBytes = &value case float64: @@ -199,7 +200,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "index_writer_memory": if err := dec.Decode(&s.IndexWriterMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexWriterMemory", err) } case "index_writer_memory_in_bytes": @@ -209,7 +210,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexWriterMemoryInBytes", err) } s.IndexWriterMemoryInBytes = value case float64: @@ -224,7 +225,7 @@ func (s *SegmentsStats) UnmarshalJSON(data 
[]byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxUnsafeAutoIdTimestamp", err) } s.MaxUnsafeAutoIdTimestamp = value case float64: @@ -234,7 +235,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "memory": if err := dec.Decode(&s.Memory); err != nil { - return err + return fmt.Errorf("%s | %w", "Memory", err) } case "memory_in_bytes": @@ -244,7 +245,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemoryInBytes", err) } s.MemoryInBytes = value case float64: @@ -254,7 +255,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "norms_memory": if err := dec.Decode(&s.NormsMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "NormsMemory", err) } case "norms_memory_in_bytes": @@ -264,7 +265,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NormsMemoryInBytes", err) } s.NormsMemoryInBytes = value case float64: @@ -274,7 +275,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "points_memory": if err := dec.Decode(&s.PointsMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "PointsMemory", err) } case "points_memory_in_bytes": @@ -284,7 +285,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PointsMemoryInBytes", err) } s.PointsMemoryInBytes = value case float64: @@ -299,7 +300,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFieldsMemoryInBytes", err) } s.StoredFieldsMemoryInBytes = value case float64: @@ -309,7 +310,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "stored_memory": if err := dec.Decode(&s.StoredMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredMemory", err) } case "term_vectors_memory_in_bytes": @@ -319,7 +320,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TermVectorsMemoryInBytes", err) } s.TermVectorsMemoryInBytes = value case float64: @@ -329,12 +330,12 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "term_vectory_memory": if err := dec.Decode(&s.TermVectoryMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "TermVectoryMemory", err) } case "terms_memory": if err := dec.Decode(&s.TermsMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "TermsMemory", err) } case "terms_memory_in_bytes": @@ -344,7 +345,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TermsMemoryInBytes", err) } s.TermsMemoryInBytes = value case float64: @@ -354,7 +355,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "version_map_memory": if err := dec.Decode(&s.VersionMapMemory); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionMapMemory", err) } case "version_map_memory_in_bytes": @@ -364,7 +365,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) 
error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "VersionMapMemoryInBytes", err) } s.VersionMapMemoryInBytes = value case float64: diff --git a/typedapi/types/serialdifferencingaggregation.go b/typedapi/types/serialdifferencingaggregation.go index bc8f88803e..c90796aba4 100644 --- a/typedapi/types/serialdifferencingaggregation.go +++ b/typedapi/types/serialdifferencingaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SerialDifferencingAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L361-L367 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L361-L367 type SerialDifferencingAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -66,13 +67,13 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "lag": @@ -94,7 +95,7 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lag", err) } s.Lag = &value case float64: @@ -104,13 +105,13 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/serializedclusterstate.go b/typedapi/types/serializedclusterstate.go index 70681b0c96..5041e339e5 100644 --- a/typedapi/types/serializedclusterstate.go +++ b/typedapi/types/serializedclusterstate.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SerializedClusterState type. 
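// The cat-API record types in this diff (see SegmentsRecord above) list several
// JSON keys per field, e.g. `case "committed", "ic", "isCommitted":`, so whichever
// column label the response carries lands in the same Go field, and a bad value is
// now reported under that field's name. A local stand-in for the multi-key clause:
package main

import (
	"encoding/json"
	"fmt"
)

type segmentsRecord struct {
	Committed *string
}

func (s *segmentsRecord) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	// Mirrors: case "committed", "ic", "isCommitted":
	for _, key := range []string{"committed", "ic", "isCommitted"} {
		v, ok := raw[key]
		if !ok {
			continue
		}
		var o string
		if err := json.Unmarshal(v, &o); err != nil {
			return fmt.Errorf("%s | %w", "Committed", err)
		}
		s.Committed = &o
	}
	return nil
}

func main() {
	for _, doc := range []string{`{"committed":"true"}`, `{"isCommitted":"true"}`} {
		var r segmentsRecord
		if err := json.Unmarshal([]byte(doc), &r); err != nil {
			fmt.Println(err)
			continue
		}
		fmt.Println(*r.Committed) // "true" from either key
	}
}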
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L232-L238 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L232-L238 type SerializedClusterState struct { Diffs *SerializedClusterStateDetail `json:"diffs,omitempty"` // FullStates Number of published cluster states. diff --git a/typedapi/types/serializedclusterstatedetail.go b/typedapi/types/serializedclusterstatedetail.go index a3053fbc46..8da2c1bc48 100644 --- a/typedapi/types/serializedclusterstatedetail.go +++ b/typedapi/types/serializedclusterstatedetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SerializedClusterStateDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L240-L246 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L240-L246 type SerializedClusterStateDetail struct { CompressedSize *string `json:"compressed_size,omitempty"` CompressedSizeInBytes *int64 `json:"compressed_size_in_bytes,omitempty"` @@ -57,7 +58,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { case "compressed_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CompressedSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CompressedSizeInBytes", err) } s.CompressedSizeInBytes = &value case float64: @@ -88,7 +89,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -99,7 +100,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { case "uncompressed_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UncompressedSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,7 +116,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UncompressedSizeInBytes", err) } s.UncompressedSizeInBytes = &value case float64: diff --git a/typedapi/types/servicetoken.go b/typedapi/types/servicetoken.go index b33533e850..f0f8eb0b76 100644 --- a/typedapi/types/servicetoken.go +++ b/typedapi/types/servicetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ServiceToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/create_service_token/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/create_service_token/types.ts#L22-L25 type ServiceToken struct { Name string `json:"name"` Value string `json:"value"` @@ -53,13 +54,13 @@ func (s *ServiceToken) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/setprocessor.go b/typedapi/types/setprocessor.go index 3cee834e0a..41467d480d 100644 --- a/typedapi/types/setprocessor.go +++ b/typedapi/types/setprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SetProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1023-L1057 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1023-L1057 type SetProcessor struct { // CopyFrom The origin field which will be copied to `field`, cannot set `value` // simultaneously. 
@@ -87,13 +88,13 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case "copy_from": if err := dec.Decode(&s.CopyFrom); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyFrom", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,13 +105,13 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreEmptyValue", err) } s.IgnoreEmptyValue = &value case bool: @@ -140,7 +141,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -150,7 +151,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case "media_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MediaType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -161,7 +162,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "override": @@ -171,7 +172,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Override", err) } s.Override = &value case bool: @@ -181,7 +182,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -192,7 +193,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/setsecurityuserprocessor.go b/typedapi/types/setsecurityuserprocessor.go index 80054f2a12..a042405431 100644 --- a/typedapi/types/setsecurityuserprocessor.go +++ b/typedapi/types/setsecurityuserprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SetSecurityUserProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1059-L1068 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1059-L1068 type SetSecurityUserProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -68,7 +69,7 @@ func (s *SetSecurityUserProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,13 +80,13 @@ func (s *SetSecurityUserProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -101,7 +102,7 @@ func (s *SetSecurityUserProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -110,18 +111,18 @@ func (s *SetSecurityUserProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "properties": if err := dec.Decode(&s.Properties); err != nil { - return err + return fmt.Errorf("%s | %w", "Properties", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/settings.go b/typedapi/types/settings.go index baff268541..5b43a848da 100644 --- a/typedapi/types/settings.go +++ b/typedapi/types/settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Settings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L98-L144 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L98-L144 type Settings struct { // AlignCheckpoints Specifies whether the transform checkpoint ranges should be optimized for // performance. 
Such optimization can align @@ -89,7 +90,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AlignCheckpoints", err) } s.AlignCheckpoints = &value case bool: @@ -103,7 +104,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DatesAsEpochMillis", err) } s.DatesAsEpochMillis = &value case bool: @@ -117,7 +118,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DeduceMappings", err) } s.DeduceMappings = &value case bool: @@ -131,7 +132,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocsPerSecond", err) } f := float32(value) s.DocsPerSecond = &f @@ -148,7 +149,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPageSearchSize", err) } s.MaxPageSearchSize = &value case float64: @@ -163,7 +164,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Unattended", err) } s.Unattended = &value case bool: diff --git a/typedapi/types/settingsanalyze.go b/typedapi/types/settingsanalyze.go index 01c814f1de..f7c91d0068 100644 --- a/typedapi/types/settingsanalyze.go +++ b/typedapi/types/settingsanalyze.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SettingsAnalyze type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L229-L232 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L233-L236 type SettingsAnalyze struct { MaxTokenCount Stringifiedinteger `json:"max_token_count,omitempty"` } @@ -51,7 +52,7 @@ func (s *SettingsAnalyze) UnmarshalJSON(data []byte) error { case "max_token_count": if err := dec.Decode(&s.MaxTokenCount); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenCount", err) } } diff --git a/typedapi/types/settingshighlight.go b/typedapi/types/settingshighlight.go index f5b09dcb84..adf67eb7c8 100644 --- a/typedapi/types/settingshighlight.go +++ b/typedapi/types/settingshighlight.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SettingsHighlight type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L224-L227 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L228-L231 type SettingsHighlight struct { MaxAnalyzedOffset *int `json:"max_analyzed_offset,omitempty"` } @@ -58,7 +59,7 @@ func (s *SettingsHighlight) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAnalyzedOffset", err) } s.MaxAnalyzedOffset = &value case float64: diff --git a/typedapi/types/settingsquerystring.go b/typedapi/types/settingsquerystring.go index 5b9a09028c..d28ae83fa5 100644 --- a/typedapi/types/settingsquerystring.go +++ b/typedapi/types/settingsquerystring.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SettingsQueryString type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L244-L246 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L248-L250 type SettingsQueryString struct { Lenient Stringifiedboolean `json:"lenient"` } @@ -51,7 +52,7 @@ func (s *SettingsQueryString) UnmarshalJSON(data []byte) error { case "lenient": if err := dec.Decode(&s.Lenient); err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } } diff --git a/typedapi/types/settingssearch.go b/typedapi/types/settingssearch.go index 7c5bce3877..36a0334489 100644 --- a/typedapi/types/settingssearch.go +++ b/typedapi/types/settingssearch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SettingsSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L234-L237 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L238-L241 type SettingsSearch struct { Idle *SearchIdle `json:"idle,omitempty"` Slowlog *SlowlogSettings `json:"slowlog,omitempty"` diff --git a/typedapi/types/settingssimilarity.go b/typedapi/types/settingssimilarity.go index 92e956d40d..16afc4bc8a 100644 --- a/typedapi/types/settingssimilarity.go +++ b/typedapi/types/settingssimilarity.go @@ -16,26 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types -// SettingsSimilarity type. 
+// SettingsSimilarity holds the union for the following types: // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L169-L181 -type SettingsSimilarity struct { - Bm25 *SettingsSimilarityBm25 `json:"bm25,omitempty"` - Dfi *SettingsSimilarityDfi `json:"dfi,omitempty"` - Dfr *SettingsSimilarityDfr `json:"dfr,omitempty"` - Ib *SettingsSimilarityIb `json:"ib,omitempty"` - Lmd *SettingsSimilarityLmd `json:"lmd,omitempty"` - Lmj *SettingsSimilarityLmj `json:"lmj,omitempty"` - ScriptedTfidf *SettingsSimilarityScriptedTfidf `json:"scripted_tfidf,omitempty"` -} - -// NewSettingsSimilarity returns a SettingsSimilarity. -func NewSettingsSimilarity() *SettingsSimilarity { - r := &SettingsSimilarity{} - - return r -} +// SettingsSimilarityBm25 +// SettingsSimilarityBoolean +// SettingsSimilarityDfi +// SettingsSimilarityDfr +// SettingsSimilarityIb +// SettingsSimilarityLmd +// SettingsSimilarityLmj +// SettingsSimilarityScripted +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L169-L180 +type SettingsSimilarity interface{} diff --git a/typedapi/types/settingssimilaritybm25.go b/typedapi/types/settingssimilaritybm25.go index 714f3fd891..4599a9623d 100644 --- a/typedapi/types/settingssimilaritybm25.go +++ b/typedapi/types/settingssimilaritybm25.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,18 +24,19 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SettingsSimilarityBm25 type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L183-L188 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L186-L191 type SettingsSimilarityBm25 struct { - B Float64 `json:"b"` - DiscountOverlaps bool `json:"discount_overlaps"` - K1 Float64 `json:"k1"` - Type string `json:"type,omitempty"` + B *Float64 `json:"b,omitempty"` + DiscountOverlaps *bool `json:"discount_overlaps,omitempty"` + K1 *Float64 `json:"k1,omitempty"` + Type string `json:"type,omitempty"` } func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { @@ -60,13 +61,13 @@ func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "B", err) } f := Float64(value) - s.B = f + s.B = &f case float64: f := Float64(v) - s.B = f + s.B = &f } case "discount_overlaps": @@ -76,11 +77,11 @@ func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DiscountOverlaps", err) } - s.DiscountOverlaps = value + s.DiscountOverlaps = &value case bool: - s.DiscountOverlaps = v + s.DiscountOverlaps = &v } case "k1": @@ -90,18 +91,18 @@ func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "K1", err) } f := Float64(value) - s.K1 = f + s.K1 = &f case float64: f := Float64(v) - s.K1 = f + s.K1 = &f } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/settingssimilarityboolean.go b/typedapi/types/settingssimilarityboolean.go new file mode 100644 index 0000000000..58e7427c7f --- /dev/null +++ b/typedapi/types/settingssimilarityboolean.go @@ -0,0 +1,51 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "encoding/json" +) + +// SettingsSimilarityBoolean type. 
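SettingsSimilarity changes shape here: instead of a struct with one optional field per algorithm, it is now an open union (an empty interface) with a dedicated struct per variant, and the BM25 tuning knobs become optional pointers. A rough sketch of declaring a custom similarity under the new model; it assumes IndexSettings keys similarities by name (Record<string, SettingsSimilarity> in the specification), which is not shown in this hunk:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// B and K1 are now *Float64, so only explicitly set values are serialized.
	b := types.Float64(0.75)
	k1 := types.Float64(1.2)

	// Any variant struct (Bm25, Boolean, Dfi, Dfr, Ib, Lmd, Lmj, Scripted)
	// satisfies the SettingsSimilarity union.
	var sim types.SettingsSimilarity = &types.SettingsSimilarityBm25{
		B:  &b,
		K1: &k1,
	}

	// Assumption: IndexSettings carries a map[string]SettingsSimilarity field
	// named Similarity, mirroring the specification.
	settings := types.IndexSettings{
		Similarity: map[string]types.SettingsSimilarity{"my_bm25": sim},
	}

	out, _ := json.Marshal(settings)
	fmt.Println(string(out))
}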
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L182-L184 +type SettingsSimilarityBoolean struct { + Type string `json:"type,omitempty"` +} + +// MarshalJSON override marshalling to include literal value +func (s SettingsSimilarityBoolean) MarshalJSON() ([]byte, error) { + type innerSettingsSimilarityBoolean SettingsSimilarityBoolean + tmp := innerSettingsSimilarityBoolean{ + Type: s.Type, + } + + tmp.Type = "boolean" + + return json.Marshal(tmp) +} + +// NewSettingsSimilarityBoolean returns a SettingsSimilarityBoolean. +func NewSettingsSimilarityBoolean() *SettingsSimilarityBoolean { + r := &SettingsSimilarityBoolean{} + + return r +} diff --git a/typedapi/types/settingssimilaritydfi.go b/typedapi/types/settingssimilaritydfi.go index d8418be11f..95fdf72c9e 100644 --- a/typedapi/types/settingssimilaritydfi.go +++ b/typedapi/types/settingssimilaritydfi.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -28,7 +28,7 @@ import ( // SettingsSimilarityDfi type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L190-L193 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L193-L196 type SettingsSimilarityDfi struct { IndependenceMeasure dfiindependencemeasure.DFIIndependenceMeasure `json:"independence_measure"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/settingssimilaritydfr.go b/typedapi/types/settingssimilaritydfr.go index 999f32de70..89204339b8 100644 --- a/typedapi/types/settingssimilaritydfr.go +++ b/typedapi/types/settingssimilaritydfr.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -30,7 +30,7 @@ import ( // SettingsSimilarityDfr type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L195-L200 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L198-L203 type SettingsSimilarityDfr struct { AfterEffect dfraftereffect.DFRAfterEffect `json:"after_effect"` BasicModel dfrbasicmodel.DFRBasicModel `json:"basic_model"` diff --git a/typedapi/types/settingssimilarityib.go b/typedapi/types/settingssimilarityib.go index 1bb8d8f2c3..0d81d1b6b6 100644 --- a/typedapi/types/settingssimilarityib.go +++ b/typedapi/types/settingssimilarityib.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
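The new variant structs pin their discriminator in MarshalJSON: the value is copied into an inner alias type and Type is overwritten with the literal ("boolean" above, "scripted" further down), so the field is always emitted even when the struct is otherwise empty. A quick sketch of the observable behaviour:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A zero-valued SettingsSimilarityBoolean still serializes its type,
	// because MarshalJSON forces Type = "boolean" before encoding.
	out, err := json.Marshal(types.NewSettingsSimilarityBoolean())
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"type":"boolean"}
}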
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -30,7 +30,7 @@ import ( // SettingsSimilarityIb type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L202-L207 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L205-L210 type SettingsSimilarityIb struct { Distribution ibdistribution.IBDistribution `json:"distribution"` Lambda iblambda.IBLambda `json:"lambda"` diff --git a/typedapi/types/settingssimilaritylmd.go b/typedapi/types/settingssimilaritylmd.go index e18e5501ef..15d6c0df8f 100644 --- a/typedapi/types/settingssimilaritylmd.go +++ b/typedapi/types/settingssimilaritylmd.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,16 +24,17 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SettingsSimilarityLmd type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L209-L212 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L212-L215 type SettingsSimilarityLmd struct { - Mu int `json:"mu"` - Type string `json:"type,omitempty"` + Mu *Float64 `json:"mu,omitempty"` + Type string `json:"type,omitempty"` } func (s *SettingsSimilarityLmd) UnmarshalJSON(data []byte) error { @@ -52,24 +53,24 @@ func (s *SettingsSimilarityLmd) UnmarshalJSON(data []byte) error { switch t { case "mu": - var tmp interface{} dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Mu", err) } - s.Mu = value + f := Float64(value) + s.Mu = &f case float64: - f := int(v) - s.Mu = f + f := Float64(v) + s.Mu = &f } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/settingssimilaritylmj.go b/typedapi/types/settingssimilaritylmj.go index 9180d3d86e..c949fca77e 100644 --- a/typedapi/types/settingssimilaritylmj.go +++ b/typedapi/types/settingssimilaritylmj.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,16 +24,17 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SettingsSimilarityLmj type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L214-L217 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L217-L220 type SettingsSimilarityLmj struct { - Lambda Float64 `json:"lambda"` - Type string `json:"type,omitempty"` + Lambda *Float64 `json:"lambda,omitempty"` + Type string `json:"type,omitempty"` } func (s *SettingsSimilarityLmj) UnmarshalJSON(data []byte) error { @@ -58,18 +59,18 @@ func (s *SettingsSimilarityLmj) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lambda", err) } f := Float64(value) - s.Lambda = f + s.Lambda = &f case float64: f := Float64(v) - s.Lambda = f + s.Lambda = &f } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/settingssimilarityscripted.go b/typedapi/types/settingssimilarityscripted.go new file mode 100644 index 0000000000..1df4e05d5f --- /dev/null +++ b/typedapi/types/settingssimilarityscripted.go @@ -0,0 +1,156 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// SettingsSimilarityScripted type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L222-L226 +type SettingsSimilarityScripted struct { + Script Script `json:"script"` + Type string `json:"type,omitempty"` + WeightScript Script `json:"weight_script,omitempty"` +} + +func (s *SettingsSimilarityScripted) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "script": + message := json.RawMessage{} + if err := dec.Decode(&message); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + keyDec := json.NewDecoder(bytes.NewReader(message)) + for { + t, err := keyDec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return fmt.Errorf("%s | %w", "Script", err) + } + + switch t { + + case "lang", "options", "source": + o := NewInlineScript() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + case "id": + o := NewStoredScriptId() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + } + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "weight_script": + message := json.RawMessage{} + if err := dec.Decode(&message); err != nil { + return fmt.Errorf("%s | %w", "WeightScript", err) + } + keyDec := json.NewDecoder(bytes.NewReader(message)) + for { + t, err := keyDec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return fmt.Errorf("%s | %w", "WeightScript", err) + } + + switch t { + + case "lang", "options", "source": + o := NewInlineScript() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "WeightScript", err) + } + s.WeightScript = o + + case "id": + o := NewStoredScriptId() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "WeightScript", err) + } + s.WeightScript = o + + } + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s SettingsSimilarityScripted) MarshalJSON() ([]byte, error) { + type innerSettingsSimilarityScripted SettingsSimilarityScripted + tmp := innerSettingsSimilarityScripted{ + Script: s.Script, + Type: s.Type, + WeightScript: s.WeightScript, + } + + tmp.Type = "scripted" + + return json.Marshal(tmp) +} + +// NewSettingsSimilarityScripted returns a SettingsSimilarityScripted. +func NewSettingsSimilarityScripted() *SettingsSimilarityScripted { + r := &SettingsSimilarityScripted{} + + return r +} diff --git a/typedapi/types/settingssimilarityscriptedtfidf.go b/typedapi/types/settingssimilarityscriptedtfidf.go deleted file mode 100644 index 610a42035d..0000000000 --- a/typedapi/types/settingssimilarityscriptedtfidf.go +++ /dev/null @@ -1,117 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. 
licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 - -package types - -import ( - "bytes" - "encoding/json" - "errors" - "io" -) - -// SettingsSimilarityScriptedTfidf type. -// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L219-L222 -type SettingsSimilarityScriptedTfidf struct { - Script Script `json:"script"` - Type string `json:"type,omitempty"` -} - -func (s *SettingsSimilarityScriptedTfidf) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "script": - message := json.RawMessage{} - if err := dec.Decode(&message); err != nil { - return err - } - keyDec := json.NewDecoder(bytes.NewReader(message)) - for { - t, err := keyDec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "lang", "options", "source": - o := NewInlineScript() - localDec := json.NewDecoder(bytes.NewReader(message)) - if err := localDec.Decode(&o); err != nil { - return err - } - s.Script = o - - case "id": - o := NewStoredScriptId() - localDec := json.NewDecoder(bytes.NewReader(message)) - if err := localDec.Decode(&o); err != nil { - return err - } - s.Script = o - - } - } - - case "type": - if err := dec.Decode(&s.Type); err != nil { - return err - } - - } - } - return nil -} - -// MarshalJSON override marshalling to include literal value -func (s SettingsSimilarityScriptedTfidf) MarshalJSON() ([]byte, error) { - type innerSettingsSimilarityScriptedTfidf SettingsSimilarityScriptedTfidf - tmp := innerSettingsSimilarityScriptedTfidf{ - Script: s.Script, - Type: s.Type, - } - - tmp.Type = "scripted" - - return json.Marshal(tmp) -} - -// NewSettingsSimilarityScriptedTfidf returns a SettingsSimilarityScriptedTfidf. -func NewSettingsSimilarityScriptedTfidf() *SettingsSimilarityScriptedTfidf { - r := &SettingsSimilarityScriptedTfidf{} - - return r -} diff --git a/typedapi/types/shapefieldquery.go b/typedapi/types/shapefieldquery.go index 6e5e5e74f7..f0c5453feb 100644 --- a/typedapi/types/shapefieldquery.go +++ b/typedapi/types/shapefieldquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geoshaperelation" @@ -31,7 +32,7 @@ import ( // ShapeFieldQuery type. 
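SettingsSimilarityScripted, which replaces the deleted SettingsSimilarityScriptedTfidf, also gains a weight_script field and decodes both script fields by sniffing their keys: "lang", "options" or "source" select an inline script, "id" selects a stored-script reference. A sketch of how that dispatch plays out for a consumer (the script bodies are hypothetical):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical fragment: an inline main script plus a stored weight script.
	raw := []byte(`{
		"type": "scripted",
		"script": {"source": "double tf = Math.sqrt(doc.freq); return query.boost * tf;"},
		"weight_script": {"id": "my-stored-weight-script"}
	}`)

	var sim types.SettingsSimilarityScripted
	if err := json.Unmarshal(raw, &sim); err != nil {
		panic(err)
	}

	// "source" routes to *InlineScript, "id" routes to *StoredScriptId.
	fmt.Printf("script: %T, weight_script: %T\n", sim.Script, sim.WeightScript)
}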
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L354-L367 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L354-L367 type ShapeFieldQuery struct { // IndexedShape Queries using a pre-indexed shape. IndexedShape *FieldLookup `json:"indexed_shape,omitempty"` @@ -59,17 +60,17 @@ func (s *ShapeFieldQuery) UnmarshalJSON(data []byte) error { case "indexed_shape": if err := dec.Decode(&s.IndexedShape); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedShape", err) } case "relation": if err := dec.Decode(&s.Relation); err != nil { - return err + return fmt.Errorf("%s | %w", "Relation", err) } case "shape": if err := dec.Decode(&s.Shape); err != nil { - return err + return fmt.Errorf("%s | %w", "Shape", err) } } diff --git a/typedapi/types/shapeproperty.go b/typedapi/types/shapeproperty.go index 4250b4d1c4..d44a1e5c8b 100644 --- a/typedapi/types/shapeproperty.go +++ b/typedapi/types/shapeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // ShapeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/geo.ts#L69-L81 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/geo.ts#L73-L85 type ShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -74,7 +75,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -87,13 +88,13 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -104,7 +105,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -113,7 +114,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -431,7 +432,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -446,7 +447,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + 
return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -460,7 +461,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreZValue", err) } s.IgnoreZValue = &value case bool: @@ -472,12 +473,12 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "orientation": if err := dec.Decode(&s.Orientation); err != nil { - return err + return fmt.Errorf("%s | %w", "Orientation", err) } case "properties": @@ -790,7 +791,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -806,7 +807,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -815,7 +816,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/shapequery.go b/typedapi/types/shapequery.go index 063e9e9290..4abd8aff97 100644 --- a/typedapi/types/shapequery.go +++ b/typedapi/types/shapequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // ShapeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/specialized.ts#L344-L352 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/specialized.ts#L344-L352 type ShapeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -68,7 +68,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -84,7 +84,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreUnmapped", err) } s.IgnoreUnmapped = &value case bool: @@ -94,7 +94,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -108,7 +108,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { s.ShapeQuery = make(map[string]ShapeFieldQuery, 0) } if err := dec.Decode(&s.ShapeQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "ShapeQuery", err) } default: diff --git a/typedapi/types/shardcommit.go b/typedapi/types/shardcommit.go index 16a18df39d..aadd68dc26 100644 --- a/typedapi/types/shardcommit.go +++ b/typedapi/types/shardcommit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardCommit type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L112-L117 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L112-L117 type ShardCommit struct { Generation int `json:"generation"` Id string `json:"id"` @@ -61,7 +62,7 @@ func (s *ShardCommit) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Generation", err) } s.Generation = value case float64: @@ -71,7 +72,7 @@ func (s *ShardCommit) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "num_docs": @@ -81,7 +82,7 @@ func (s *ShardCommit) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumDocs", err) } s.NumDocs = value case float64: @@ -94,7 +95,7 @@ func (s *ShardCommit) UnmarshalJSON(data []byte) error { s.UserData = make(map[string]string, 0) } if err := dec.Decode(&s.UserData); err != nil { - return err + return fmt.Errorf("%s | %w", "UserData", err) } } diff --git a/typedapi/types/shardfailure.go b/typedapi/types/shardfailure.go index ca7f8f98c3..fc68c378b4 100644 --- a/typedapi/types/shardfailure.go +++ b/typedapi/types/shardfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardFailure type. 
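The same decoding idiom repeats through the Shard* stats types below: numeric fields are first decoded into an untyped value and accepted either as a JSON number or as a quoted string, and the strconv failures now name the field. Using ShardCommit from this hunk, a small sketch of that leniency:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both spellings decode to the same values: the generated switch handles
	// the string case via strconv and the float64 case via a plain conversion.
	payloads := []string{
		`{"id":"a1","generation":3,"num_docs":42,"user_data":{}}`,
		`{"id":"a1","generation":"3","num_docs":"42","user_data":{}}`,
	}
	for _, raw := range payloads {
		var c types.ShardCommit
		if err := json.Unmarshal([]byte(raw), &c); err != nil {
			panic(err)
		}
		fmt.Println(c.Generation, c.NumDocs)
	}
}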
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Errors.ts#L50-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Errors.ts#L50-L56 type ShardFailure struct { Index *string `json:"index,omitempty"` Node *string `json:"node,omitempty"` @@ -56,13 +57,13 @@ func (s *ShardFailure) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *ShardFailure) UnmarshalJSON(data []byte) error { case "reason": if err := dec.Decode(&s.Reason); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } case "shard": @@ -84,7 +85,7 @@ func (s *ShardFailure) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } s.Shard = value case float64: @@ -95,7 +96,7 @@ func (s *ShardFailure) UnmarshalJSON(data []byte) error { case "status": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardfilesizeinfo.go b/typedapi/types/shardfilesizeinfo.go index 78d4bcaad1..f47792692a 100644 --- a/typedapi/types/shardfilesizeinfo.go +++ b/typedapi/types/shardfilesizeinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardFileSizeInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L124-L131 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L124-L131 type ShardFileSizeInfo struct { AverageSizeInBytes *int64 `json:"average_size_in_bytes,omitempty"` Count *int64 `json:"count,omitempty"` @@ -62,7 +63,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "AverageSizeInBytes", err) } s.AverageSizeInBytes = &value case float64: @@ -77,7 +78,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -88,7 +89,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,7 +105,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSizeInBytes", err) } s.MaxSizeInBytes = &value case float64: @@ -119,7 +120,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinSizeInBytes", err) } s.MinSizeInBytes = &value case float64: @@ -134,7 +135,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: diff --git a/typedapi/types/shardhealthstats.go b/typedapi/types/shardhealthstats.go index d2d10fad9f..43b525ca3d 100644 --- a/typedapi/types/shardhealthstats.go +++ b/typedapi/types/shardhealthstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ShardHealthStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/health/types.ts#L36-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/health/types.ts#L36-L43 type ShardHealthStats struct { ActiveShards int `json:"active_shards"` InitializingShards int `json:"initializing_shards"` @@ -65,7 +66,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ActiveShards", err) } s.ActiveShards = value case float64: @@ -81,7 +82,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitializingShards", err) } s.InitializingShards = value case float64: @@ -96,7 +97,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryActive", err) } s.PrimaryActive = value case bool: @@ -111,7 +112,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RelocatingShards", err) } s.RelocatingShards = value case float64: @@ -121,7 +122,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "unassigned_shards": @@ -132,7 +133,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedShards", err) } s.UnassignedShards = value case float64: diff --git a/typedapi/types/shardlease.go b/typedapi/types/shardlease.go index 0098589c55..4e2621470d 100644 --- a/typedapi/types/shardlease.go +++ b/typedapi/types/shardlease.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardLease type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L133-L138 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L133-L138 type ShardLease struct { Id string `json:"id"` RetainingSeqNo int64 `json:"retaining_seq_no"` @@ -55,18 +56,18 @@ func (s *ShardLease) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "retaining_seq_no": if err := dec.Decode(&s.RetainingSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "RetainingSeqNo", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *ShardLease) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = value case float64: diff --git a/typedapi/types/shardmigrationstatus.go b/typedapi/types/shardmigrationstatus.go index 80cda6c89d..641043832e 100644 --- a/typedapi/types/shardmigrationstatus.go +++ b/typedapi/types/shardmigrationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // ShardMigrationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L52-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L52-L54 type ShardMigrationStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/shardpath.go b/typedapi/types/shardpath.go index 5b5fa81dc1..52cb1595a4 100644 --- a/typedapi/types/shardpath.go +++ b/typedapi/types/shardpath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardPath type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L140-L144 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L140-L144 type ShardPath struct { DataPath string `json:"data_path"` IsCustomDataPath bool `json:"is_custom_data_path"` @@ -55,7 +56,7 @@ func (s *ShardPath) UnmarshalJSON(data []byte) error { case "data_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *ShardPath) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IsCustomDataPath", err) } s.IsCustomDataPath = value case bool: @@ -81,7 +82,7 @@ func (s *ShardPath) UnmarshalJSON(data []byte) error { case "state_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StatePath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardprofile.go b/typedapi/types/shardprofile.go index 37dd6611f1..6b3f6af324 100644 --- a/typedapi/types/shardprofile.go +++ b/typedapi/types/shardprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/profile.ts#L132-L137 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/profile.ts#L132-L137 type ShardProfile struct { Aggregations []AggregationProfile `json:"aggregations"` Fetch *FetchProfile `json:"fetch,omitempty"` @@ -55,18 +56,18 @@ func (s *ShardProfile) UnmarshalJSON(data []byte) error { case "aggregations": if err := dec.Decode(&s.Aggregations); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } case "fetch": if err := dec.Decode(&s.Fetch); err != nil { - return err + return fmt.Errorf("%s | %w", "Fetch", err) } case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *ShardProfile) UnmarshalJSON(data []byte) error { case "searches": if err := dec.Decode(&s.Searches); err != nil { - return err + return fmt.Errorf("%s | %w", "Searches", err) } } diff --git a/typedapi/types/shardquerycache.go b/typedapi/types/shardquerycache.go index e88505c1f7..45672c89b7 100644 --- a/typedapi/types/shardquerycache.go +++ b/typedapi/types/shardquerycache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardQueryCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L146-L154 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L146-L154 type ShardQueryCache struct { CacheCount int64 `json:"cache_count"` CacheSize int64 `json:"cache_size"` @@ -63,7 +64,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheCount", err) } s.CacheCount = value case float64: @@ -78,7 +79,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheSize", err) } s.CacheSize = value case float64: @@ -93,7 +94,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = value case float64: @@ -108,7 +109,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "HitCount", err) } s.HitCount = value case float64: @@ -123,7 +124,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MemorySizeInBytes", err) } s.MemorySizeInBytes = value case float64: @@ -138,7 +139,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissCount", err) } s.MissCount = value case float64: @@ -153,7 +154,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCount", err) } s.TotalCount = value case float64: diff --git a/typedapi/types/shardrecovery.go b/typedapi/types/shardrecovery.go index b89e61755b..8fb95f2e3c 100644 --- a/typedapi/types/shardrecovery.go +++ b/typedapi/types/shardrecovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardRecovery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L118-L135 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L118-L135 type ShardRecovery struct { Id int64 `json:"id"` Index RecoveryIndexStatus `json:"index"` @@ -72,7 +73,7 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } s.Id = value case float64: @@ -82,7 +83,7 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "primary": @@ -92,7 +93,7 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Primary", err) } s.Primary = value case bool: @@ -101,13 +102,13 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "stage": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Stage", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,53 +119,53 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { case "start": if err := dec.Decode(&s.Start); err != nil { - return err + return fmt.Errorf("%s | %w", "Start", err) } case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "stop_time": if err := dec.Decode(&s.StopTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StopTime", err) } case "stop_time_in_millis": if err := dec.Decode(&s.StopTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StopTimeInMillis", err) } case "target": if err := dec.Decode(&s.Target); err != nil { - return err + return fmt.Errorf("%s | %w", "Target", err) } case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } case "translog": if err := dec.Decode(&s.Translog); err != nil { - return err + return fmt.Errorf("%s | %w", "Translog", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -175,7 +176,7 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { case "verify_index": if err := dec.Decode(&s.VerifyIndex); err != nil { - return err + return fmt.Errorf("%s | %w", "VerifyIndex", err) } } diff --git a/typedapi/types/shardretentionleases.go b/typedapi/types/shardretentionleases.go index bb3e5059a5..282fbe31e4 100644 --- a/typedapi/types/shardretentionleases.go +++ b/typedapi/types/shardretentionleases.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardRetentionLeases type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L156-L160 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L156-L160 type ShardRetentionLeases struct { Leases []ShardLease `json:"leases"` PrimaryTerm int64 `json:"primary_term"` @@ -54,7 +55,7 @@ func (s *ShardRetentionLeases) UnmarshalJSON(data []byte) error { case "leases": if err := dec.Decode(&s.Leases); err != nil { - return err + return fmt.Errorf("%s | %w", "Leases", err) } case "primary_term": @@ -64,7 +65,7 @@ func (s *ShardRetentionLeases) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm", err) } s.PrimaryTerm = value case float64: @@ -74,7 +75,7 @@ func (s *ShardRetentionLeases) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/shardrouting.go b/typedapi/types/shardrouting.go index 038e132e2a..e8d24b48cd 100644 --- a/typedapi/types/shardrouting.go +++ b/typedapi/types/shardrouting.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ShardRouting type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L162-L167 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L162-L167 type ShardRouting struct { Node string `json:"node"` Primary bool `json:"primary"` @@ -58,7 +59,7 @@ func (s *ShardRouting) UnmarshalJSON(data []byte) error { case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *ShardRouting) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Primary", err) } s.Primary = value case bool: @@ -84,7 +85,7 @@ func (s *ShardRouting) UnmarshalJSON(data []byte) error { case "relocating_node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RelocatingNode", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,7 +96,7 @@ func (s *ShardRouting) UnmarshalJSON(data []byte) error { case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } } diff --git a/typedapi/types/shardsavailabilityindicator.go b/typedapi/types/shardsavailabilityindicator.go index 8432fd7bc3..45c298cc27 100644 --- a/typedapi/types/shardsavailabilityindicator.go +++ b/typedapi/types/shardsavailabilityindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ShardsAvailabilityIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L104-L108 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L104-L108 type ShardsAvailabilityIndicator struct { Details *ShardsAvailabilityIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *ShardsAvailabilityIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardsavailabilityindicatordetails.go b/typedapi/types/shardsavailabilityindicatordetails.go index b410529d94..1d961947fc 100644 --- a/typedapi/types/shardsavailabilityindicatordetails.go +++ b/typedapi/types/shardsavailabilityindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardsAvailabilityIndicatorDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L109-L119 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L109-L119 type ShardsAvailabilityIndicatorDetails struct { CreatingPrimaries int64 `json:"creating_primaries"` InitializingPrimaries int64 `json:"initializing_primaries"` @@ -65,7 +66,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "CreatingPrimaries", err) } s.CreatingPrimaries = value case float64: @@ -80,7 +81,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitializingPrimaries", err) } s.InitializingPrimaries = value case float64: @@ -95,7 +96,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitializingReplicas", err) } s.InitializingReplicas = value case float64: @@ -110,7 +111,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RestartingPrimaries", err) } s.RestartingPrimaries = value case float64: @@ -125,7 +126,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RestartingReplicas", err) } s.RestartingReplicas = value case float64: @@ -140,7 +141,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StartedPrimaries", err) } s.StartedPrimaries = value case float64: @@ -155,7 +156,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "StartedReplicas", err) } s.StartedReplicas = value case float64: @@ -170,7 +171,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedPrimaries", err) } s.UnassignedPrimaries = value case float64: @@ -185,7 +186,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedReplicas", err) } s.UnassignedReplicas = value case float64: diff --git a/typedapi/types/shardscapacityindicator.go b/typedapi/types/shardscapacityindicator.go index 3cb110111d..0a60720ede 100644 --- a/typedapi/types/shardscapacityindicator.go +++ b/typedapi/types/shardscapacityindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // ShardsCapacityIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L171-L175 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L171-L175 type ShardsCapacityIndicator struct { Details *ShardsCapacityIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *ShardsCapacityIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardscapacityindicatordetails.go b/typedapi/types/shardscapacityindicatordetails.go index 90fefbd714..2f37771367 100644 --- a/typedapi/types/shardscapacityindicatordetails.go +++ b/typedapi/types/shardscapacityindicatordetails.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ShardsCapacityIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L177-L180 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L177-L180 type ShardsCapacityIndicatorDetails struct { Data ShardsCapacityIndicatorTierDetail `json:"data"` Frozen ShardsCapacityIndicatorTierDetail `json:"frozen"` diff --git a/typedapi/types/shardscapacityindicatortierdetail.go b/typedapi/types/shardscapacityindicatortierdetail.go index 72f10fef90..2ecec7b03a 100644 --- a/typedapi/types/shardscapacityindicatortierdetail.go +++ b/typedapi/types/shardscapacityindicatortierdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardsCapacityIndicatorTierDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L182-L185 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L182-L185 type ShardsCapacityIndicatorTierDetail struct { CurrentUsedShards *int `json:"current_used_shards,omitempty"` MaxShardsInCluster int `json:"max_shards_in_cluster"` @@ -59,7 +60,7 @@ func (s *ShardsCapacityIndicatorTierDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentUsedShards", err) } s.CurrentUsedShards = &value case float64: @@ -75,7 +76,7 @@ func (s *ShardsCapacityIndicatorTierDetail) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxShardsInCluster", err) } s.MaxShardsInCluster = value case float64: diff --git a/typedapi/types/shardsegmentrouting.go b/typedapi/types/shardsegmentrouting.go index de0cf52d2a..14e943d695 100644 --- a/typedapi/types/shardsegmentrouting.go +++ b/typedapi/types/shardsegmentrouting.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardSegmentRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/segments/types.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/segments/types.ts#L40-L44 type ShardSegmentRouting struct { Node string `json:"node"` Primary bool `json:"primary"` @@ -55,7 +56,7 @@ func (s *ShardSegmentRouting) UnmarshalJSON(data []byte) error { case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *ShardSegmentRouting) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Primary", err) } s.Primary = value case bool: @@ -81,7 +82,7 @@ func (s *ShardSegmentRouting) UnmarshalJSON(data []byte) error { case "state": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardsequencenumber.go b/typedapi/types/shardsequencenumber.go index 81c673d2be..495bb48ae9 100644 --- a/typedapi/types/shardsequencenumber.go +++ b/typedapi/types/shardsequencenumber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardSequenceNumber type. 
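
ShardSegmentRouting shows the other two recurring decode shapes: string columns are read as json.RawMessage and strconv.Unquote-d with a raw-bytes fallback, and booleans accept either true or "true". A short standalone sketch of both helpers (names are illustrative; the generated code inlines this logic per field):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeString mirrors the generated RawMessage + strconv.Unquote fallback.
func decodeString(raw json.RawMessage) string {
	o := string(raw)
	if unq, err := strconv.Unquote(o); err == nil {
		return unq
	}
	return o // fall back to the raw bytes, as the generated code does
}

// decodeBool mirrors the generated tolerance for both "true" and true.
func decodeBool(raw json.RawMessage, field string) (bool, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return false, fmt.Errorf("%s | %w", field, err)
	}
	switch v := tmp.(type) {
	case string:
		b, err := strconv.ParseBool(v)
		if err != nil {
			return false, fmt.Errorf("%s | %w", field, err)
		}
		return b, nil
	case bool:
		return v, nil
	}
	return false, nil
}

func main() {
	fmt.Println(decodeString(json.RawMessage(`"node-1"`)))        // node-1
	fmt.Println(decodeBool(json.RawMessage(`"true"`), "Primary")) // true <nil>
	fmt.Println(decodeBool(json.RawMessage(`false`), "Primary"))  // false <nil>
}
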
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L176-L180 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L176-L180 type ShardSequenceNumber struct { GlobalCheckpoint int64 `json:"global_checkpoint"` LocalCheckpoint int64 `json:"local_checkpoint"` @@ -59,7 +60,7 @@ func (s *ShardSequenceNumber) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "GlobalCheckpoint", err) } s.GlobalCheckpoint = value case float64: @@ -74,7 +75,7 @@ func (s *ShardSequenceNumber) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LocalCheckpoint", err) } s.LocalCheckpoint = value case float64: @@ -84,7 +85,7 @@ func (s *ShardSequenceNumber) UnmarshalJSON(data []byte) error { case "max_seq_no": if err := dec.Decode(&s.MaxSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxSeqNo", err) } } diff --git a/typedapi/types/shardsrecord.go b/typedapi/types/shardsrecord.go index 2d28b9a061..bceb269292 100644 --- a/typedapi/types/shardsrecord.go +++ b/typedapi/types/shardsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/shards/types.ts#L20-L421 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/shards/types.ts#L20-L421 type ShardsRecord struct { // BulkAvgSizeInBytes The average size in bytes of shard bulk operations. 
BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` @@ -244,7 +245,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkAvgSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -256,7 +257,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "bulk.avg_time", "bati", "bulkAvgTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkAvgTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -268,7 +269,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "bulk.total_operations", "bto", "bulkTotalOperations": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalOperations", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -280,7 +281,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "bulk.total_size_in_bytes", "btsi", "bulkTotalSizeInBytes": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalSizeInBytes", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -292,7 +293,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "bulk.total_time", "btti", "bulkTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "BulkTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -304,7 +305,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "completion.size", "cs", "completionSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CompletionSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -316,7 +317,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "docs", "d", "dc": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Docs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -328,7 +329,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "fielddata.evictions", "fe", "fielddataEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -340,7 +341,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "fielddata.memory_size", "fm", "fielddataMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -352,7 +353,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "flush.total", "ft", "flushTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -364,7 +365,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "flush.total_time", "ftt", "flushTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -376,7 +377,7 @@ func (s *ShardsRecord) UnmarshalJSON(data 
[]byte) error { case "get.current", "gc", "getCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -388,7 +389,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "get.exists_time", "geti", "getExistsTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetExistsTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -400,7 +401,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "get.exists_total", "geto", "getExistsTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetExistsTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -412,7 +413,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "get.missing_time", "gmti", "getMissingTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetMissingTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -424,7 +425,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "get.missing_total", "gmto", "getMissingTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetMissingTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -436,7 +437,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "get.time", "gti", "getTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -448,7 +449,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "get.total", "gto", "getTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "GetTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -460,7 +461,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -472,7 +473,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "index", "i", "idx": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -484,7 +485,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_current", "idc", "indexingDeleteCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -496,7 +497,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_time", "idti", "indexingDeleteTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -508,7 +509,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.delete_total", "idto", "indexingDeleteTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingDeleteTotal", err) } o := string(tmp[:]) o, err = 
strconv.Unquote(o) @@ -520,7 +521,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.index_current", "iic", "indexingIndexCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -532,7 +533,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.index_failed", "iif", "indexingIndexFailed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexFailed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -544,7 +545,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.index_time", "iiti", "indexingIndexTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -556,7 +557,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "indexing.index_total", "iito", "indexingIndexTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingIndexTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -568,7 +569,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "ip": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -580,7 +581,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.current", "mc", "mergesCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -592,7 +593,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.current_docs", "mcd", "mergesCurrentDocs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrentDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -604,7 +605,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.current_size", "mcs", "mergesCurrentSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesCurrentSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -616,7 +617,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.total", "mt", "mergesTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -628,7 +629,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.total_docs", "mtd", "mergesTotalDocs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalDocs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -640,7 +641,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.total_size", "mts", "mergesTotalSize": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -652,7 +653,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "merges.total_time", "mtt", 
"mergesTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MergesTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -664,7 +665,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "node", "n": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -676,7 +677,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "path.data", "pd", "dataPath": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PathData", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -688,7 +689,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "path.state", "ps", "statsPath": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PathState", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -700,7 +701,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "prirep", "p", "pr", "primaryOrReplica": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Prirep", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -712,7 +713,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "query_cache.evictions", "qce", "queryCacheEvictions": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheEvictions", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -724,7 +725,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "query_cache.memory_size", "qcm", "queryCacheMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryCacheMemorySize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -736,7 +737,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "recoverysource.type", "rs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RecoverysourceType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -748,7 +749,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "refresh.external_time", "rti", "refreshTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshExternalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -760,7 +761,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "refresh.external_total", "rto", "refreshTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshExternalTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -772,7 +773,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "refresh.listeners", "rli", "refreshListeners": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshListeners", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -784,7 +785,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "refresh.time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -796,7 +797,7 @@ func 
(s *ShardsRecord) UnmarshalJSON(data []byte) error { case "refresh.total": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RefreshTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -808,7 +809,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.fetch_current", "sfc", "searchFetchCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -820,7 +821,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.fetch_time", "sfti", "searchFetchTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -832,7 +833,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.fetch_total", "sfto", "searchFetchTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFetchTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -844,7 +845,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.open_contexts", "so", "searchOpenContexts": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchOpenContexts", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -856,7 +857,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.query_current", "sqc", "searchQueryCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -868,7 +869,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.query_time", "sqti", "searchQueryTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -880,7 +881,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.query_total", "sqto", "searchQueryTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQueryTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -892,7 +893,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.scroll_current", "scc", "searchScrollCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -904,7 +905,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.scroll_time", "scti", "searchScrollTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -916,7 +917,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "search.scroll_total", "scto", "searchScrollTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchScrollTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -928,7 +929,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "segments.count", "sc", 
"segmentsCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -940,7 +941,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "segments.fixed_bitset_memory", "sfbm", "fixedBitsetMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsFixedBitsetMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -952,7 +953,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "segments.index_writer_memory", "siwm", "segmentsIndexWriterMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsIndexWriterMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -964,7 +965,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "segments.memory", "sm", "segmentsMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -976,7 +977,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "segments.version_map_memory", "svmm", "segmentsVersionMapMemory": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SegmentsVersionMapMemory", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -988,7 +989,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "seq_no.global_checkpoint", "sqg", "globalCheckpoint": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoGlobalCheckpoint", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1000,7 +1001,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "seq_no.local_checkpoint", "sql", "localCheckpoint": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoLocalCheckpoint", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1012,7 +1013,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "seq_no.max", "sqm", "maxSeqNo": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoMax", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1024,7 +1025,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "shard", "s", "sh": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Shard", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1036,7 +1037,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "state", "st": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1048,7 +1049,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "store", "sto": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1060,7 +1061,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "sync_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SyncId", err) } o := string(tmp[:]) o, err = 
strconv.Unquote(o) @@ -1072,7 +1073,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "unassigned.at", "ua": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedAt", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1084,7 +1085,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "unassigned.details", "ud": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedDetails", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1096,7 +1097,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "unassigned.for", "uf": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedFor", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1108,7 +1109,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "unassigned.reason", "ur": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UnassignedReason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1120,7 +1121,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "warmer.current", "wc", "warmerCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WarmerCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1132,7 +1133,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "warmer.total", "wto", "warmerTotal": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WarmerTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -1144,7 +1145,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { case "warmer.total_time", "wtt", "warmerTotalTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "WarmerTotalTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardssegment.go b/typedapi/types/shardssegment.go index 59898af236..9ad5e27f7d 100644 --- a/typedapi/types/shardssegment.go +++ b/typedapi/types/shardssegment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardsSegment type. 
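
The long ShardsRecord switch above also documents the accepted column aliases for the cat shards API: every field matches its full name, its short cat alias and its camelCase header (for example "docs", "d" and "dc"). A hedged usage sketch, assuming the usual github.com/elastic/go-elasticsearch/v8/typedapi/types import path, showing that differently labelled rows normalize to the same struct:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The same logical row, once with full column names and once with the
	// short cat aliases; both hit the same cases in UnmarshalJSON.
	rows := []string{
		`{"index":"logs-2024","docs":"42","state":"STARTED"}`,
		`{"i":"logs-2024","dc":"42","st":"STARTED"}`,
	}
	for _, row := range rows {
		var rec types.ShardsRecord
		if err := json.Unmarshal([]byte(row), &rec); err != nil {
			panic(err)
		}
		// Re-marshal to see the canonical field names regardless of input.
		out, _ := json.Marshal(rec)
		fmt.Println(string(out))
	}
}
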
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/segments/types.ts#L46-L51 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/segments/types.ts#L46-L51 type ShardsSegment struct { NumCommittedSegments int `json:"num_committed_segments"` NumSearchSegments int `json:"num_search_segments"` @@ -61,7 +62,7 @@ func (s *ShardsSegment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumCommittedSegments", err) } s.NumCommittedSegments = value case float64: @@ -77,7 +78,7 @@ func (s *ShardsSegment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumSearchSegments", err) } s.NumSearchSegments = value case float64: @@ -87,7 +88,7 @@ func (s *ShardsSegment) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "segments": @@ -95,7 +96,7 @@ func (s *ShardsSegment) UnmarshalJSON(data []byte) error { s.Segments = make(map[string]Segment, 0) } if err := dec.Decode(&s.Segments); err != nil { - return err + return fmt.Errorf("%s | %w", "Segments", err) } } diff --git a/typedapi/types/shardsstatssummary.go b/typedapi/types/shardsstatssummary.go index 116ff4385c..c21d2942c3 100644 --- a/typedapi/types/shardsstatssummary.go +++ b/typedapi/types/shardsstatssummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // ShardsStatsSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotShardsStatus.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotShardsStatus.ts#L29-L35 type ShardsStatsSummary struct { Incremental ShardsStatsSummaryItem `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` @@ -55,27 +56,27 @@ func (s *ShardsStatsSummary) UnmarshalJSON(data []byte) error { case "incremental": if err := dec.Decode(&s.Incremental); err != nil { - return err + return fmt.Errorf("%s | %w", "Incremental", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "time": if err := dec.Decode(&s.Time); err != nil { - return err + return fmt.Errorf("%s | %w", "Time", err) } case "time_in_millis": if err := dec.Decode(&s.TimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInMillis", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } } diff --git a/typedapi/types/shardsstatssummaryitem.go b/typedapi/types/shardsstatssummaryitem.go index 7a3668f194..01d8f80aba 100644 --- a/typedapi/types/shardsstatssummaryitem.go +++ b/typedapi/types/shardsstatssummaryitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardsStatsSummaryItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotShardsStatus.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotShardsStatus.ts#L37-L40 type ShardsStatsSummaryItem struct { FileCount int64 `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` @@ -58,7 +59,7 @@ func (s *ShardsStatsSummaryItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "FileCount", err) } s.FileCount = value case float64: @@ -73,7 +74,7 @@ func (s *ShardsStatsSummaryItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: diff --git a/typedapi/types/shardstatistics.go b/typedapi/types/shardstatistics.go index 3f0bcc7788..8ead1313e7 100644 --- a/typedapi/types/shardstatistics.go +++ b/typedapi/types/shardstatistics.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ShardStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L54-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L54-L66 type ShardStatistics struct { Failed uint `json:"failed"` Failures []ShardFailure `json:"failures,omitempty"` diff --git a/typedapi/types/shardstore.go b/typedapi/types/shardstore.go index 577361d374..fb1a8c2d6d 100644 --- a/typedapi/types/shardstore.go +++ b/typedapi/types/shardstore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // ShardStore type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L30-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L30-L34 type ShardStore struct { Allocation shardstoreallocation.ShardStoreAllocation `json:"allocation"` AllocationId *string `json:"allocation_id,omitempty"` @@ -57,12 +57,12 @@ func (s *ShardStore) UnmarshalJSON(data []byte) error { case "allocation": if err := dec.Decode(&s.Allocation); err != nil { - return err + return fmt.Errorf("%s | %w", "Allocation", err) } case "allocation_id": if err := dec.Decode(&s.AllocationId); err != nil { - return err + return fmt.Errorf("%s | %w", "AllocationId", err) } case "ShardStore": @@ -70,12 +70,12 @@ func (s *ShardStore) UnmarshalJSON(data []byte) error { s.ShardStore = make(map[string]ShardStoreNode, 0) } if err := dec.Decode(&s.ShardStore); err != nil { - return err + return fmt.Errorf("%s | %w", "ShardStore", err) } case "store_exception": if err := dec.Decode(&s.StoreException); err != nil { - return err + return fmt.Errorf("%s | %w", "StoreException", err) } default: diff --git a/typedapi/types/shardstoreexception.go b/typedapi/types/shardstoreexception.go index b4171e8b27..81543e6928 100644 --- a/typedapi/types/shardstoreexception.go +++ b/typedapi/types/shardstoreexception.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardStoreException type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L51-L54 type ShardStoreException struct { Reason string `json:"reason"` Type string `json:"type"` @@ -54,7 +55,7 @@ func (s *ShardStoreException) UnmarshalJSON(data []byte) error { case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *ShardStoreException) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/shardstoreindex.go b/typedapi/types/shardstoreindex.go index 5c6617c047..ac4b74ce14 100644 --- a/typedapi/types/shardstoreindex.go +++ b/typedapi/types/shardstoreindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ShardStoreIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search_shards/SearchShardsResponse.ts#L33-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search_shards/SearchShardsResponse.ts#L33-L36 type ShardStoreIndex struct { Aliases []string `json:"aliases,omitempty"` Filter *Query `json:"filter,omitempty"` diff --git a/typedapi/types/shardstorenode.go b/typedapi/types/shardstorenode.go index 95beecae85..7aaed849f1 100644 --- a/typedapi/types/shardstorenode.go +++ b/typedapi/types/shardstorenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardStoreNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L36-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L36-L43 type ShardStoreNode struct { Attributes map[string]string `json:"attributes"` EphemeralId *string `json:"ephemeral_id,omitempty"` @@ -60,13 +61,13 @@ func (s *ShardStoreNode) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "ephemeral_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "EphemeralId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *ShardStoreNode) UnmarshalJSON(data []byte) error { case "external_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ExternalId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,17 +90,17 @@ func (s *ShardStoreNode) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/shardstorewrapper.go b/typedapi/types/shardstorewrapper.go index 4d65fc8e38..322cd59892 100644 --- a/typedapi/types/shardstorewrapper.go +++ b/typedapi/types/shardstorewrapper.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // ShardStoreWrapper type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/shard_stores/types.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/shard_stores/types.ts#L56-L58 type ShardStoreWrapper struct { Stores []ShardStore `json:"stores"` } diff --git a/typedapi/types/shardstotalstats.go b/typedapi/types/shardstotalstats.go index e26722719c..e08611e426 100644 --- a/typedapi/types/shardstotalstats.go +++ b/typedapi/types/shardstotalstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShardsTotalStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/stats/types.ts#L182-L184 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/stats/types.ts#L182-L184 type ShardsTotalStats struct { TotalCount int64 `json:"total_count"` } @@ -57,7 +58,7 @@ func (s *ShardsTotalStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalCount", err) } s.TotalCount = value case float64: diff --git a/typedapi/types/shared.go b/typedapi/types/shared.go index 43730925ce..df5f7724ec 100644 --- a/typedapi/types/shared.go +++ b/typedapi/types/shared.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Shared type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/searchable_snapshots/cache_stats/Response.ts#L34-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/searchable_snapshots/cache_stats/Response.ts#L34-L43 type Shared struct { BytesReadInBytes ByteSize `json:"bytes_read_in_bytes"` BytesWrittenInBytes ByteSize `json:"bytes_written_in_bytes"` @@ -59,12 +60,12 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case "bytes_read_in_bytes": if err := dec.Decode(&s.BytesReadInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "BytesReadInBytes", err) } case "bytes_written_in_bytes": if err := dec.Decode(&s.BytesWrittenInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "BytesWrittenInBytes", err) } case "evictions": @@ -74,7 +75,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = value case float64: @@ -90,7 +91,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumRegions", err) } s.NumRegions = value case float64: @@ -105,7 +106,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Reads", err) } s.Reads = value case float64: @@ -115,12 +116,12 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case "region_size_in_bytes": if err := dec.Decode(&s.RegionSizeInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "RegionSizeInBytes", err) } case "size_in_bytes": if err := dec.Decode(&s.SizeInBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } case "writes": @@ -130,7 +131,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Writes", err) } s.Writes = value case float64: diff --git a/typedapi/types/sharedfilesystemrepository.go b/typedapi/types/sharedfilesystemrepository.go new 
file mode 100644 index 0000000000..5ba3da23c1 --- /dev/null +++ b/typedapi/types/sharedfilesystemrepository.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// SharedFileSystemRepository type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L55-L58 +type SharedFileSystemRepository struct { + Settings SharedFileSystemRepositorySettings `json:"settings"` + Type string `json:"type,omitempty"` + Uuid *string `json:"uuid,omitempty"` +} + +func (s *SharedFileSystemRepository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return fmt.Errorf("%s | %w", "Settings", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return fmt.Errorf("%s | %w", "Uuid", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s SharedFileSystemRepository) MarshalJSON() ([]byte, error) { + type innerSharedFileSystemRepository SharedFileSystemRepository + tmp := innerSharedFileSystemRepository{ + Settings: s.Settings, + Type: s.Type, + Uuid: s.Uuid, + } + + tmp.Type = "fs" + + return json.Marshal(tmp) +} + +// NewSharedFileSystemRepository returns a SharedFileSystemRepository. +func NewSharedFileSystemRepository() *SharedFileSystemRepository { + r := &SharedFileSystemRepository{} + + return r +} diff --git a/typedapi/types/sharedfilesystemrepositorysettings.go b/typedapi/types/sharedfilesystemrepositorysettings.go new file mode 100644 index 0000000000..bf55fcf7be --- /dev/null +++ b/typedapi/types/sharedfilesystemrepositorysettings.go @@ -0,0 +1,141 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// SharedFileSystemRepositorySettings type. +// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L104-L108 +type SharedFileSystemRepositorySettings struct { + ChunkSize ByteSize `json:"chunk_size,omitempty"` + Compress *bool `json:"compress,omitempty"` + Location string `json:"location"` + MaxNumberOfSnapshots *int `json:"max_number_of_snapshots,omitempty"` + MaxRestoreBytesPerSec ByteSize `json:"max_restore_bytes_per_sec,omitempty"` + MaxSnapshotBytesPerSec ByteSize `json:"max_snapshot_bytes_per_sec,omitempty"` + Readonly *bool `json:"readonly,omitempty"` +} + +func (s *SharedFileSystemRepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "chunk_size": + if err := dec.Decode(&s.ChunkSize); err != nil { + return fmt.Errorf("%s | %w", "ChunkSize", err) + } + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Compress", err) + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "location": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Location", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Location = o + + case "max_number_of_snapshots": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "MaxNumberOfSnapshots", err) + } + s.MaxNumberOfSnapshots = &value + case float64: + f := int(v) + s.MaxNumberOfSnapshots = &f + } + + case "max_restore_bytes_per_sec": + if err := dec.Decode(&s.MaxRestoreBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxRestoreBytesPerSec", err) + } + + case "max_snapshot_bytes_per_sec": + if err := dec.Decode(&s.MaxSnapshotBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxSnapshotBytesPerSec", err) + } + + case "readonly": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Readonly", err) + } + s.Readonly = &value + case bool: + s.Readonly = &v + } + + } + } + return nil +} + +// NewSharedFileSystemRepositorySettings returns a SharedFileSystemRepositorySettings. 
+func NewSharedFileSystemRepositorySettings() *SharedFileSystemRepositorySettings { + r := &SharedFileSystemRepositorySettings{} + + return r +} diff --git a/typedapi/types/shingletokenfilter.go b/typedapi/types/shingletokenfilter.go index 00c821cd04..03e522297e 100644 --- a/typedapi/types/shingletokenfilter.go +++ b/typedapi/types/shingletokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShingleTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L87-L95 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L87-L95 type ShingleTokenFilter struct { FillerToken *string `json:"filler_token,omitempty"` MaxShingleSize string `json:"max_shingle_size,omitempty"` @@ -60,7 +61,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case "filler_token": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FillerToken", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case "max_shingle_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxShingleSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case "min_shingle_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinShingleSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -100,7 +101,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutputUnigrams", err) } s.OutputUnigrams = &value case bool: @@ -114,7 +115,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OutputUnigramsIfNoShingles", err) } s.OutputUnigramsIfNoShingles = &value case bool: @@ -124,7 +125,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case "token_separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TokenSeparator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,12 +136,12 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/shortnumberproperty.go b/typedapi/types/shortnumberproperty.go index bdea533197..d3418b83c2 100644 --- a/typedapi/types/shortnumberproperty.go +++ b/typedapi/types/shortnumberproperty.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // ShortNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L159-L162 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L159-L162 type ShortNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,17 +499,17 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ -821,7 
+822,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -830,7 +831,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -839,7 +840,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -847,7 +848,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -857,7 +858,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -873,7 +874,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -887,7 +888,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -896,12 +897,12 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/shrinkconfiguration.go b/typedapi/types/shrinkconfiguration.go index c83a951530..d18928c4e7 100644 --- a/typedapi/types/shrinkconfiguration.go +++ b/typedapi/types/shrinkconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ShrinkConfiguration type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/_types/Phase.ts#L60-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/_types/Phase.ts#L60-L62 type ShrinkConfiguration struct { NumberOfShards int `json:"number_of_shards"` } @@ -58,7 +59,7 @@ func (s *ShrinkConfiguration) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfShards", err) } s.NumberOfShards = value case float64: diff --git a/typedapi/types/significantlongtermsaggregate.go b/typedapi/types/significantlongtermsaggregate.go index 15ae6916fa..90f86e50ac 100644 --- a/typedapi/types/significantlongtermsaggregate.go +++ b/typedapi/types/significantlongtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SignificantLongTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L588-L590 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L588-L590 type SignificantLongTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` @@ -60,7 +61,7 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = &value case float64: @@ -78,13 +79,13 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]SignificantLongTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []SignificantLongTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -96,7 +97,7 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = &value case float64: @@ -106,7 +107,7 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/significantlongtermsbucket.go b/typedapi/types/significantlongtermsbucket.go index bb9a4e7478..8fdb079e45 100644 --- a/typedapi/types/significantlongtermsbucket.go +++ b/typedapi/types/significantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
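The change repeated across the remaining hunks swaps bare `return err` for `fmt.Errorf("%s | %w", "<FieldName>", err)`, so a failed decode now names the struct field while %w keeps the underlying error reachable through errors.Is and errors.As. A sketch against ShrinkConfiguration from the hunk above; the malformed payload is illustrative:

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// number_of_shards is a string that is not a valid integer, so the
	// generated UnmarshalJSON returns an error wrapped with the field name.
	var cfg types.ShrinkConfiguration
	err := json.Unmarshal([]byte(`{"number_of_shards":"not-a-number"}`), &cfg)

	fmt.Println(err) // e.g. NumberOfShards | strconv.Atoi: parsing "not-a-number": invalid syntax

	// %w keeps the cause reachable for errors.As / errors.Is.
	var numErr *strconv.NumError
	if errors.As(err, &numErr) {
		fmt.Println("offending value:", numErr.Num)
	}
}
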
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // SignificantLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L597-L600 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L597-L600 type SignificantLongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` @@ -64,7 +64,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = value case float64: @@ -79,7 +79,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -94,7 +94,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } s.Key = value case float64: @@ -105,7 +105,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,7 +121,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } f := Float64(value) s.Score = f @@ -144,490 +144,490 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := 
NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := 
NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -637,7 +637,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/significantstringtermsaggregate.go b/typedapi/types/significantstringtermsaggregate.go index b0e13a9603..bbfbe642fa 100644 --- a/typedapi/types/significantstringtermsaggregate.go +++ b/typedapi/types/significantstringtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SignificantStringTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L602-L604 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L602-L604 type SignificantStringTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` @@ -60,7 +61,7 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = &value case float64: @@ -78,13 +79,13 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]SignificantStringTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []SignificantStringTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -96,7 +97,7 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = &value case float64: @@ -106,7 +107,7 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/significantstringtermsbucket.go b/typedapi/types/significantstringtermsbucket.go index b19b7882f4..b11a12ad93 100644 --- a/typedapi/types/significantstringtermsbucket.go +++ b/typedapi/types/significantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // SignificantStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L606-L608 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L606-L608 type SignificantStringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` @@ -63,7 +63,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = value case float64: @@ -78,7 +78,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -89,7 +89,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +105,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } f := Float64(value) s.Score = f @@ -128,490 +128,490 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := 
dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := 
NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -621,7 +621,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go b/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go index 09b1732131..b263773b1b 100644 --- a/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go +++ b/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SignificantTermsAggregateBaseSignificantLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L581-L586 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L581-L586 type SignificantTermsAggregateBaseSignificantLongTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` @@ -60,7 +61,7 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = &value case float64: @@ -78,13 +79,13 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( case '{': o := make(map[string]SignificantLongTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []SignificantLongTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -96,7 +97,7 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = &value case float64: @@ -106,7 +107,7 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go b/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go index fbf4a55358..166af80652 100644 --- a/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go +++ 
b/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SignificantTermsAggregateBaseSignificantStringTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L581-L586 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L581-L586 type SignificantTermsAggregateBaseSignificantStringTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` @@ -60,7 +61,7 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = &value case float64: @@ -78,13 +79,13 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO case '{': o := make(map[string]SignificantStringTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []SignificantStringTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -96,7 +97,7 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = &value case float64: @@ -106,7 +107,7 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/significanttermsaggregatebasevoid.go b/typedapi/types/significanttermsaggregatebasevoid.go index 52283f4651..756fc31c64 100644 --- a/typedapi/types/significanttermsaggregatebasevoid.go +++ b/typedapi/types/significanttermsaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SignificantTermsAggregateBaseVoid type. 
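SignificantStringTermsAggregate earlier, and the SignificantTermsAggregateBase* wrappers in this stretch of the diff, all decode Buckets as a union: a JSON object becomes a map keyed by bucket key, a JSON array becomes a slice, so callers type-switch on the stored value. A sketch against the string-terms variant; the payload shape is illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Buckets is a union: Elasticsearch returns either an array of buckets
	// or an object keyed by bucket key, and the generated UnmarshalJSON
	// stores whichever form it sees.
	payload := []byte(`{
		"doc_count": 50,
		"bg_count": 1000,
		"buckets": [
			{"key": "elastic", "doc_count": 5, "bg_count": 40, "score": 2.1}
		]
	}`)

	var agg types.SignificantStringTermsAggregate
	if err := json.Unmarshal(payload, &agg); err != nil {
		panic(err)
	}

	switch buckets := agg.Buckets.(type) {
	case []types.SignificantStringTermsBucket:
		for _, b := range buckets {
			fmt.Println(b.Key, b.DocCount, b.Score)
		}
	case map[string]types.SignificantStringTermsBucket:
		for key := range buckets {
			fmt.Println("keyed bucket:", key)
		}
	}
}
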
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L581-L586 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L581-L586 type SignificantTermsAggregateBaseVoid struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsVoid `json:"buckets"` @@ -60,7 +61,7 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = &value case float64: @@ -78,13 +79,13 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]interface{}, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []interface{}{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -96,7 +97,7 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = &value case float64: @@ -106,7 +107,7 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/significanttermsaggregation.go b/typedapi/types/significanttermsaggregation.go index ebfac3e16c..d4889b4333 100644 --- a/typedapi/types/significanttermsaggregation.go +++ b/typedapi/types/significanttermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SignificantTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L770-L834 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L770-L834 type SignificantTermsAggregation struct { // BackgroundFilter A background filter that can be used to focus in on significant terms within // a narrower context, instead of the entire index. 
@@ -96,12 +97,12 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case "background_filter": if err := dec.Decode(&s.BackgroundFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundFilter", err) } case "chi_square": if err := dec.Decode(&s.ChiSquare); err != nil { - return err + return fmt.Errorf("%s | %w", "ChiSquare", err) } case "exclude": @@ -110,44 +111,44 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } s.Exclude = append(s.Exclude, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } } case "execution_hint": if err := dec.Decode(&s.ExecutionHint); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionHint", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "gnd": if err := dec.Decode(&s.Gnd); err != nil { - return err + return fmt.Errorf("%s | %w", "Gnd", err) } case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } case "jlh": if err := dec.Decode(&s.Jlh); err != nil { - return err + return fmt.Errorf("%s | %w", "Jlh", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -157,7 +158,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -167,13 +168,13 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case "mutual_information": if err := dec.Decode(&s.MutualInformation); err != nil { - return err + return fmt.Errorf("%s | %w", "MutualInformation", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -184,12 +185,12 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case "percentage": if err := dec.Decode(&s.Percentage); err != nil { - return err + return fmt.Errorf("%s | %w", "Percentage", err) } case "script_heuristic": if err := dec.Decode(&s.ScriptHeuristic); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptHeuristic", err) } case "shard_min_doc_count": @@ -199,7 +200,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardMinDocCount", err) } s.ShardMinDocCount = &value case float64: @@ -215,7 +216,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -231,7 +232,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git 
a/typedapi/types/significanttextaggregation.go b/typedapi/types/significanttextaggregation.go index a918999f1b..3786e970a7 100644 --- a/typedapi/types/significanttextaggregation.go +++ b/typedapi/types/significanttextaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SignificantTextAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L836-L908 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L836-L908 type SignificantTextAggregation struct { // BackgroundFilter A background filter that can be used to focus in on significant terms within // a narrower context, instead of the entire index. @@ -100,12 +101,12 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case "background_filter": if err := dec.Decode(&s.BackgroundFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "BackgroundFilter", err) } case "chi_square": if err := dec.Decode(&s.ChiSquare); err != nil { - return err + return fmt.Errorf("%s | %w", "ChiSquare", err) } case "exclude": @@ -114,24 +115,24 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } s.Exclude = append(s.Exclude, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } } case "execution_hint": if err := dec.Decode(&s.ExecutionHint); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionHint", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "filter_duplicate_text": @@ -141,7 +142,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FilterDuplicateText", err) } s.FilterDuplicateText = &value case bool: @@ -150,22 +151,22 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case "gnd": if err := dec.Decode(&s.Gnd); err != nil { - return err + return fmt.Errorf("%s | %w", "Gnd", err) } case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } case "jlh": if err := dec.Decode(&s.Jlh); err != nil { - return err + return fmt.Errorf("%s | %w", "Jlh", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -175,7 +176,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -185,13 +186,13 @@ func (s 
*SignificantTextAggregation) UnmarshalJSON(data []byte) error { case "mutual_information": if err := dec.Decode(&s.MutualInformation); err != nil { - return err + return fmt.Errorf("%s | %w", "MutualInformation", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -202,12 +203,12 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case "percentage": if err := dec.Decode(&s.Percentage); err != nil { - return err + return fmt.Errorf("%s | %w", "Percentage", err) } case "script_heuristic": if err := dec.Decode(&s.ScriptHeuristic); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptHeuristic", err) } case "shard_min_doc_count": @@ -217,7 +218,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardMinDocCount", err) } s.ShardMinDocCount = &value case float64: @@ -233,7 +234,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -249,7 +250,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -263,13 +264,13 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceFields", err) } s.SourceFields = append(s.SourceFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.SourceFields); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceFields", err) } } diff --git a/typedapi/types/simpleanalyzer.go b/typedapi/types/simpleanalyzer.go index 6595ab5961..1c4ac06b52 100644 --- a/typedapi/types/simpleanalyzer.go +++ b/typedapi/types/simpleanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SimpleAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L83-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L83-L86 type SimpleAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *SimpleAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/simplemovingaverageaggregation.go b/typedapi/types/simplemovingaverageaggregation.go index 62828dcc36..5c4d6e14e3 100644 --- a/typedapi/types/simplemovingaverageaggregation.go +++ b/typedapi/types/simplemovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SimpleMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L247-L250 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L247-L250 type SimpleMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -68,13 +69,13 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,12 +86,12 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "minimize": @@ -100,7 +101,7 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Minimize", err) } s.Minimize = &value case bool: @@ -109,13 +110,13 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "model": if err := dec.Decode(&s.Model); err != nil { - return err + return fmt.Errorf("%s | %w", "Model", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -132,7 +133,7 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Predict", err) } s.Predict = &value case float64: @@ -142,7 +143,7 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "window": @@ -153,7 +154,7 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Window", err) } s.Window = &value case float64: diff --git a/typedapi/types/simplequerystringflags.go b/typedapi/types/simplequerystringflags.go index 6c65dc963d..5bf7df1c2a 100644 --- a/typedapi/types/simplequerystringflags.go +++ b/typedapi/types/simplequerystringflags.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SimpleQueryStringFlags type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L702-L706 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L702-L706 type SimpleQueryStringFlags PipeSeparatedFlagsSimpleQueryStringFlag diff --git a/typedapi/types/simplequerystringquery.go b/typedapi/types/simplequerystringquery.go index dcd7ba9e46..9d47c625b2 100644 --- a/typedapi/types/simplequerystringquery.go +++ b/typedapi/types/simplequerystringquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SimpleQueryStringQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/fulltext.ts#L765-L830 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/fulltext.ts#L765-L830 type SimpleQueryStringQuery struct { // AnalyzeWildcard If `true`, the query attempts to analyze wildcard terms in the query string. AnalyzeWildcard *bool `json:"analyze_wildcard,omitempty"` @@ -101,7 +102,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AnalyzeWildcard", err) } s.AnalyzeWildcard = &value case bool: @@ -111,7 +112,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,7 +128,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AutoGenerateSynonymsPhraseQuery", err) } s.AutoGenerateSynonymsPhraseQuery = &value case bool: @@ -141,7 +142,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -152,17 +153,17 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "default_operator": if err := dec.Decode(&s.DefaultOperator); err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultOperator", err) } case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "flags": if err := dec.Decode(&s.Flags); err != nil { - return err + return fmt.Errorf("%s | %w", "Flags", err) } case "fuzzy_max_expansions": @@ -173,7 +174,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyMaxExpansions", err) } s.FuzzyMaxExpansions = &value case float64: @@ -189,7 +190,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyPrefixLength", err) } s.FuzzyPrefixLength = &value case float64: @@ -204,7 +205,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FuzzyTranspositions", err) } s.FuzzyTranspositions = &value case bool: @@ -218,7 +219,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } s.Lenient = &value case bool: @@ -227,13 +228,13 @@ func (s 
*SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match": if err := dec.Decode(&s.MinimumShouldMatch); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -245,7 +246,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -257,7 +258,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "quote_field_suffix": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QuoteFieldSuffix", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/simplevalueaggregate.go b/typedapi/types/simplevalueaggregate.go index 3529861a33..a3d25182cf 100644 --- a/typedapi/types/simplevalueaggregate.go +++ b/typedapi/types/simplevalueaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SimpleValueAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L224-L225 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L224-L225 type SimpleValueAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *SimpleValueAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/simulatedactions.go b/typedapi/types/simulatedactions.go index 380107f3a9..154da841fa 100644 --- a/typedapi/types/simulatedactions.go +++ b/typedapi/types/simulatedactions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SimulatedActions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L96-L100 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L96-L100 type SimulatedActions struct { Actions []string `json:"actions"` All *SimulatedActions `json:"all,omitempty"` @@ -54,12 +55,12 @@ func (s *SimulatedActions) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "all": if err := dec.Decode(&s.All); err != nil { - return err + return fmt.Errorf("%s | %w", "All", err) } case "use_all": @@ -69,7 +70,7 @@ func (s *SimulatedActions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UseAll", err) } s.UseAll = value case bool: diff --git a/typedapi/types/simulateingest.go b/typedapi/types/simulateingest.go index f0a6acd39f..a866f604de 100644 --- a/typedapi/types/simulateingest.go +++ b/typedapi/types/simulateingest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SimulateIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/simulate/types.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/simulate/types.ts#L28-L31 type SimulateIngest struct { Pipeline *string `json:"pipeline,omitempty"` Timestamp DateTime `json:"timestamp"` @@ -52,12 +53,12 @@ func (s *SimulateIngest) UnmarshalJSON(data []byte) error { case "pipeline": if err := dec.Decode(&s.Pipeline); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/sizefield.go b/typedapi/types/sizefield.go index c2d1be9cee..bcd085ea5c 100644 --- a/typedapi/types/sizefield.go +++ b/typedapi/types/sizefield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SizeField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L54-L56 type SizeField struct { Enabled bool `json:"enabled"` } @@ -57,7 +58,7 @@ func (s *SizeField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: diff --git a/typedapi/types/slackaction.go b/typedapi/types/slackaction.go index dc25c5635e..00fbe7e768 100644 --- a/typedapi/types/slackaction.go +++ b/typedapi/types/slackaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlackAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L91-L94 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L91-L94 type SlackAction struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` @@ -54,7 +55,7 @@ func (s *SlackAction) UnmarshalJSON(data []byte) error { case "account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Account", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *SlackAction) UnmarshalJSON(data []byte) error { case "message": if err := dec.Decode(&s.Message); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } } diff --git a/typedapi/types/slackattachment.go b/typedapi/types/slackattachment.go index eb3ae5ce50..a82d6c6ccc 100644 --- a/typedapi/types/slackattachment.go +++ b/typedapi/types/slackattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlackAttachment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L101-L117 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L101-L117 type SlackAttachment struct { AuthorIcon *string `json:"author_icon,omitempty"` AuthorLink *string `json:"author_link,omitempty"` @@ -67,7 +68,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "author_icon": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AuthorIcon", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "author_link": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AuthorLink", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,7 +92,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "author_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AuthorName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,7 +104,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "color": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Color", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,7 +116,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "fallback": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Fallback", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,13 +127,13 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "fields": if err := dec.Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } case "footer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Footer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -144,7 +145,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "footer_icon": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FooterIcon", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -156,7 +157,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "image_url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ImageUrl", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -168,7 +169,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "pretext": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pretext", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -180,7 +181,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -192,7 +193,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "thumb_url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ThumbUrl", err) } o := string(tmp[:]) o, 
err = strconv.Unquote(o) @@ -204,7 +205,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "title": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Title", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -216,7 +217,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "title_link": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TitleLink", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -227,7 +228,7 @@ func (s *SlackAttachment) UnmarshalJSON(data []byte) error { case "ts": if err := dec.Decode(&s.Ts); err != nil { - return err + return fmt.Errorf("%s | %w", "Ts", err) } } diff --git a/typedapi/types/slackattachmentfield.go b/typedapi/types/slackattachmentfield.go index d9438b9862..a4fbc9b03b 100644 --- a/typedapi/types/slackattachmentfield.go +++ b/typedapi/types/slackattachmentfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlackAttachmentField type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L119-L123 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L119-L123 type SlackAttachmentField struct { Int bool `json:"short"` Title string `json:"title"` @@ -59,7 +60,7 @@ func (s *SlackAttachmentField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Int", err) } s.Int = value case bool: @@ -69,7 +70,7 @@ func (s *SlackAttachmentField) UnmarshalJSON(data []byte) error { case "title": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Title", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -81,7 +82,7 @@ func (s *SlackAttachmentField) UnmarshalJSON(data []byte) error { case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/slackdynamicattachment.go b/typedapi/types/slackdynamicattachment.go index f9a1ec00a5..bb784dce9e 100644 --- a/typedapi/types/slackdynamicattachment.go +++ b/typedapi/types/slackdynamicattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlackDynamicAttachment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L125-L128 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L125-L128 type SlackDynamicAttachment struct { AttachmentTemplate SlackAttachment `json:"attachment_template"` ListPath string `json:"list_path"` @@ -53,13 +54,13 @@ func (s *SlackDynamicAttachment) UnmarshalJSON(data []byte) error { case "attachment_template": if err := dec.Decode(&s.AttachmentTemplate); err != nil { - return err + return fmt.Errorf("%s | %w", "AttachmentTemplate", err) } case "list_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ListPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/slackmessage.go b/typedapi/types/slackmessage.go index 2c2f3c64d4..8fc05b22db 100644 --- a/typedapi/types/slackmessage.go +++ b/typedapi/types/slackmessage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlackMessage type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L130-L137 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L130-L137 type SlackMessage struct { Attachments []SlackAttachment `json:"attachments"` DynamicAttachments *SlackDynamicAttachment `json:"dynamic_attachments,omitempty"` @@ -57,18 +58,18 @@ func (s *SlackMessage) UnmarshalJSON(data []byte) error { case "attachments": if err := dec.Decode(&s.Attachments); err != nil { - return err + return fmt.Errorf("%s | %w", "Attachments", err) } case "dynamic_attachments": if err := dec.Decode(&s.DynamicAttachments); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicAttachments", err) } case "from": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,7 +81,7 @@ func (s *SlackMessage) UnmarshalJSON(data []byte) error { case "icon": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Icon", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *SlackMessage) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -103,7 +104,7 @@ func (s *SlackMessage) UnmarshalJSON(data []byte) error { case "to": if err := dec.Decode(&s.To); err != nil { - return err + return fmt.Errorf("%s | %w", "To", err) } } diff --git a/typedapi/types/slackresult.go b/typedapi/types/slackresult.go index 33c1908bc4..19555bfe2a 100644 --- a/typedapi/types/slackresult.go +++ b/typedapi/types/slackresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlackResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L96-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L96-L99 type SlackResult struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` @@ -54,7 +55,7 @@ func (s *SlackResult) UnmarshalJSON(data []byte) error { case "account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Account", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *SlackResult) UnmarshalJSON(data []byte) error { case "message": if err := dec.Decode(&s.Message); err != nil { - return err + return fmt.Errorf("%s | %w", "Message", err) } } diff --git a/typedapi/types/slicedscroll.go b/typedapi/types/slicedscroll.go index 730a2743dd..f5099d74dd 100644 --- a/typedapi/types/slicedscroll.go +++ b/typedapi/types/slicedscroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlicedScroll type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/SlicedScroll.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/SlicedScroll.ts#L23-L27 type SlicedScroll struct { Field *string `json:"field,omitempty"` Id string `json:"id"` @@ -54,12 +55,12 @@ func (s *SlicedScroll) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "max": @@ -70,7 +71,7 @@ func (s *SlicedScroll) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } s.Max = value case float64: diff --git a/typedapi/types/slices.go b/typedapi/types/slices.go index 04b62dfd70..edcdd8a48e 100644 --- a/typedapi/types/slices.go +++ b/typedapi/types/slices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int // slicescalculation.SlicesCalculation // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L361-L366 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L361-L366 type Slices interface{} diff --git a/typedapi/types/slm.go b/typedapi/types/slm.go index b99487776c..9013434eb0 100644 --- a/typedapi/types/slm.go +++ b/typedapi/types/slm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Slm type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L449-L452 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L449-L452 type Slm struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -60,7 +61,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -74,7 +75,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -89,7 +90,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PolicyCount", err) } s.PolicyCount = &value case float64: @@ -99,7 +100,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { case "policy_stats": if err := dec.Decode(&s.PolicyStats); err != nil { - return err + return fmt.Errorf("%s | %w", "PolicyStats", err) } } diff --git a/typedapi/types/slmindicator.go b/typedapi/types/slmindicator.go index cf4156cf31..57e0c21d25 100644 --- a/typedapi/types/slmindicator.go +++ b/typedapi/types/slmindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SlmIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L155-L159 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L155-L159 type SlmIndicator struct { Details *SlmIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` @@ -58,28 +59,28 @@ func (s *SlmIndicator) UnmarshalJSON(data []byte) error { case "details": if err := dec.Decode(&s.Details); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } case "diagnosis": if err := dec.Decode(&s.Diagnosis); err != nil { - return err + return fmt.Errorf("%s | %w", "Diagnosis", err) } case "impacts": if err := dec.Decode(&s.Impacts); err != nil { - return err + return fmt.Errorf("%s | %w", "Impacts", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "symptom": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Symptom", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/slmindicatordetails.go b/typedapi/types/slmindicatordetails.go index ca8afd60dd..6626a8752f 100644 --- a/typedapi/types/slmindicatordetails.go +++ b/typedapi/types/slmindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SlmIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L160-L164 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L160-L164 type SlmIndicatorDetails struct { Policies int64 `json:"policies"` SlmStatus lifecycleoperationmode.LifecycleOperationMode `json:"slm_status"` @@ -61,7 +62,7 @@ func (s *SlmIndicatorDetails) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Policies", err) } s.Policies = value case float64: @@ -71,12 +72,12 @@ func (s *SlmIndicatorDetails) UnmarshalJSON(data []byte) error { case "slm_status": if err := dec.Decode(&s.SlmStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "SlmStatus", err) } case "unhealthy_policies": if err := dec.Decode(&s.UnhealthyPolicies); err != nil { - return err + return fmt.Errorf("%s | %w", "UnhealthyPolicies", err) } } diff --git a/typedapi/types/slmindicatorunhealthypolicies.go b/typedapi/types/slmindicatorunhealthypolicies.go index 3b0acea6ea..ec122448fa 100644 --- a/typedapi/types/slmindicatorunhealthypolicies.go +++ b/typedapi/types/slmindicatorunhealthypolicies.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlmIndicatorUnhealthyPolicies type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/health_report/types.ts#L166-L169 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/health_report/types.ts#L166-L169 type SlmIndicatorUnhealthyPolicies struct { Count int64 `json:"count"` InvocationsSinceLastSuccess map[string]int64 `json:"invocations_since_last_success,omitempty"` @@ -58,7 +59,7 @@ func (s *SlmIndicatorUnhealthyPolicies) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -71,7 +72,7 @@ func (s *SlmIndicatorUnhealthyPolicies) UnmarshalJSON(data []byte) error { s.InvocationsSinceLastSuccess = make(map[string]int64, 0) } if err := dec.Decode(&s.InvocationsSinceLastSuccess); err != nil { - return err + return fmt.Errorf("%s | %w", "InvocationsSinceLastSuccess", err) } } diff --git a/typedapi/types/slmpolicy.go b/typedapi/types/slmpolicy.go index 7245c37919..4ba7eca45b 100644 --- a/typedapi/types/slmpolicy.go +++ b/typedapi/types/slmpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SLMPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L76-L82 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L76-L82 type SLMPolicy struct { Config *Configuration `json:"config,omitempty"` Name string `json:"name"` @@ -56,18 +57,18 @@ func (s *SLMPolicy) UnmarshalJSON(data []byte) error { case "config": if err := dec.Decode(&s.Config); err != nil { - return err + return fmt.Errorf("%s | %w", "Config", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "repository": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,12 +79,12 @@ func (s *SLMPolicy) UnmarshalJSON(data []byte) error { case "retention": if err := dec.Decode(&s.Retention); err != nil { - return err + return fmt.Errorf("%s | %w", "Retention", err) } case "schedule": if err := dec.Decode(&s.Schedule); err != nil { - return err + return fmt.Errorf("%s | %w", "Schedule", err) } } diff --git a/typedapi/types/slowlogsettings.go b/typedapi/types/slowlogsettings.go index 8113a6f2ad..54c7491bc4 100644 --- a/typedapi/types/slowlogsettings.go +++ b/typedapi/types/slowlogsettings.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SlowlogSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L475-L480 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L479-L484 type SlowlogSettings struct { Level *string `json:"level,omitempty"` Reformat *bool `json:"reformat,omitempty"` @@ -56,7 +57,7 @@ func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { case "level": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Level", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Reformat", err) } s.Reformat = &value case bool: @@ -87,7 +88,7 @@ func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } s.Source = &value case float64: @@ -97,7 +98,7 @@ func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { case "threshold": if err := dec.Decode(&s.Threshold); err != nil { - return err + return fmt.Errorf("%s | %w", "Threshold", err) } } diff --git a/typedapi/types/slowlogtresholdlevels.go b/typedapi/types/slowlogtresholdlevels.go index 405707f42e..7a0508bf60 100644 --- a/typedapi/types/slowlogtresholdlevels.go +++ b/typedapi/types/slowlogtresholdlevels.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SlowlogTresholdLevels type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L487-L492 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L491-L496 type SlowlogTresholdLevels struct { Debug Duration `json:"debug,omitempty"` Info Duration `json:"info,omitempty"` @@ -54,22 +55,22 @@ func (s *SlowlogTresholdLevels) UnmarshalJSON(data []byte) error { case "debug": if err := dec.Decode(&s.Debug); err != nil { - return err + return fmt.Errorf("%s | %w", "Debug", err) } case "info": if err := dec.Decode(&s.Info); err != nil { - return err + return fmt.Errorf("%s | %w", "Info", err) } case "trace": if err := dec.Decode(&s.Trace); err != nil { - return err + return fmt.Errorf("%s | %w", "Trace", err) } case "warn": if err := dec.Decode(&s.Warn); err != nil { - return err + return fmt.Errorf("%s | %w", "Warn", err) } } diff --git a/typedapi/types/slowlogtresholds.go b/typedapi/types/slowlogtresholds.go index 6457043abc..3f8c91fbeb 100644 --- a/typedapi/types/slowlogtresholds.go +++ b/typedapi/types/slowlogtresholds.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SlowlogTresholds type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L482-L485 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L486-L489 type SlowlogTresholds struct { Fetch *SlowlogTresholdLevels `json:"fetch,omitempty"` Query *SlowlogTresholdLevels `json:"query,omitempty"` diff --git a/typedapi/types/smoothingmodelcontainer.go b/typedapi/types/smoothingmodelcontainer.go index d12076b026..25a3081d0e 100644 --- a/typedapi/types/smoothingmodelcontainer.go +++ b/typedapi/types/smoothingmodelcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SmoothingModelContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L442-L458 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L442-L458 type SmoothingModelContainer struct { // Laplace A smoothing model that uses an additive smoothing where a constant (typically // `1.0` or smaller) is added to all counts to balance weights. diff --git a/typedapi/types/snapshotindexstats.go b/typedapi/types/snapshotindexstats.go index a25f726608..334d9d0edf 100644 --- a/typedapi/types/snapshotindexstats.go +++ b/typedapi/types/snapshotindexstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SnapshotIndexStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotIndexStats.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotIndexStats.ts#L25-L29 type SnapshotIndexStats struct { Shards map[string]SnapshotShardsStatus `json:"shards"` ShardsStats SnapshotShardsStats `json:"shards_stats"` diff --git a/typedapi/types/snapshotinfo.go b/typedapi/types/snapshotinfo.go index 3da72375a6..d6c62b20aa 100644 --- a/typedapi/types/snapshotinfo.go +++ b/typedapi/types/snapshotinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SnapshotInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotInfo.ts#L41-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotInfo.ts#L41-L71 type SnapshotInfo struct { DataStreams []string `json:"data_streams"` Duration Duration `json:"duration,omitempty"` @@ -72,37 +73,37 @@ func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { case "data_streams": if err := dec.Decode(&s.DataStreams); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStreams", err) } case "duration": if err := dec.Decode(&s.Duration); err != nil { - return err + return fmt.Errorf("%s | %w", "Duration", err) } case "duration_in_millis": if err := dec.Decode(&s.DurationInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "DurationInMillis", err) } case "end_time": if err := dec.Decode(&s.EndTime); err != nil { - return err + return fmt.Errorf("%s | %w", "EndTime", err) } case "end_time_in_millis": if err := dec.Decode(&s.EndTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "EndTimeInMillis", err) } case "failures": if err := dec.Decode(&s.Failures); err != nil { - return err + return fmt.Errorf("%s | %w", "Failures", err) } case "feature_states": if err := dec.Decode(&s.FeatureStates); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureStates", err) } case "include_global_state": @@ -112,7 +113,7 @@ func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeGlobalState", err) } s.IncludeGlobalState = &value case bool: @@ -124,23 +125,23 @@ func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { s.IndexDetails = make(map[string]IndexDetails, 0) } if err := dec.Decode(&s.IndexDetails); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexDetails", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "metadata": if err := dec.Decode(&s.Metadata); 
err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -151,33 +152,33 @@ func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { case "repository": if err := dec.Decode(&s.Repository); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } case "shards": if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "snapshot": if err := dec.Decode(&s.Snapshot); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshot", err) } case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "state": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,17 +189,17 @@ func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { case "uuid": if err := dec.Decode(&s.Uuid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uuid", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_id": if err := dec.Decode(&s.VersionId); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionId", err) } } diff --git a/typedapi/types/snapshotlifecycle.go b/typedapi/types/snapshotlifecycle.go index c460cc0699..fbae5e36b6 100644 --- a/typedapi/types/snapshotlifecycle.go +++ b/typedapi/types/snapshotlifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SnapshotLifecycle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L38-L49 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L38-L49 type SnapshotLifecycle struct { InProgress *InProgress `json:"in_progress,omitempty"` LastFailure *Invocation `json:"last_failure,omitempty"` @@ -60,52 +61,52 @@ func (s *SnapshotLifecycle) UnmarshalJSON(data []byte) error { case "in_progress": if err := dec.Decode(&s.InProgress); err != nil { - return err + return fmt.Errorf("%s | %w", "InProgress", err) } case "last_failure": if err := dec.Decode(&s.LastFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "LastFailure", err) } case "last_success": if err := dec.Decode(&s.LastSuccess); err != nil { - return err + return fmt.Errorf("%s | %w", "LastSuccess", err) } case "modified_date": if err := dec.Decode(&s.ModifiedDate); err != nil { - return err + return fmt.Errorf("%s | %w", "ModifiedDate", err) } case "modified_date_millis": if err := dec.Decode(&s.ModifiedDateMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ModifiedDateMillis", err) } case "next_execution": if err := dec.Decode(&s.NextExecution); err != nil { - return err + return fmt.Errorf("%s | %w", "NextExecution", err) } case "next_execution_millis": if err := dec.Decode(&s.NextExecutionMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "NextExecutionMillis", err) } case "policy": if err := dec.Decode(&s.Policy); err != nil { - return err + return fmt.Errorf("%s | %w", "Policy", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/snapshotresponseitem.go b/typedapi/types/snapshotresponseitem.go index add261a3b6..a92bac0f27 100644 --- a/typedapi/types/snapshotresponseitem.go +++ b/typedapi/types/snapshotresponseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SnapshotResponseItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/get/SnapshotGetResponse.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/get/SnapshotGetResponse.ts#L44-L48 type SnapshotResponseItem struct { Error *ErrorCause `json:"error,omitempty"` Repository string `json:"repository"` @@ -53,17 +54,17 @@ func (s *SnapshotResponseItem) UnmarshalJSON(data []byte) error { case "error": if err := dec.Decode(&s.Error); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } case "repository": if err := dec.Decode(&s.Repository); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } case "snapshots": if err := dec.Decode(&s.Snapshots); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshots", err) } } diff --git a/typedapi/types/snapshotrestore.go b/typedapi/types/snapshotrestore.go index e84f78a28c..b21ecb4794 100644 --- a/typedapi/types/snapshotrestore.go +++ b/typedapi/types/snapshotrestore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SnapshotRestore type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/restore/SnapshotRestoreResponse.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/restore/SnapshotRestoreResponse.ts#L27-L31 type SnapshotRestore struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` @@ -54,18 +55,18 @@ func (s *SnapshotRestore) UnmarshalJSON(data []byte) error { case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "shards": if err := dec.Decode(&s.Shards); err != nil { - return err + return fmt.Errorf("%s | %w", "Shards", err) } case "snapshot": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshot", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/snapshotshardfailure.go b/typedapi/types/snapshotshardfailure.go index 7871dadc10..d8c22233ab 100644 --- a/typedapi/types/snapshotshardfailure.go +++ b/typedapi/types/snapshotshardfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SnapshotShardFailure type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotShardFailure.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotShardFailure.ts#L22-L28 type SnapshotShardFailure struct { Index string `json:"index"` NodeId *string `json:"node_id,omitempty"` @@ -56,18 +57,18 @@ func (s *SnapshotShardFailure) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,13 +79,13 @@ func (s *SnapshotShardFailure) UnmarshalJSON(data []byte) error { case "shard_id": if err := dec.Decode(&s.ShardId); err != nil { - return err + return fmt.Errorf("%s | %w", "ShardId", err) } case "status": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/snapshotshardsstats.go b/typedapi/types/snapshotshardsstats.go index 2e7db761f0..de338f2e63 100644 --- a/typedapi/types/snapshotshardsstats.go +++ b/typedapi/types/snapshotshardsstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SnapshotShardsStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotShardsStats.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotShardsStats.ts#L22-L29 type SnapshotShardsStats struct { Done int64 `json:"done"` Failed int64 `json:"failed"` @@ -62,7 +63,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Done", err) } s.Done = value case float64: @@ -77,7 +78,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = value case float64: @@ -92,7 +93,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Finalizing", err) } s.Finalizing = value case float64: @@ -107,7 +108,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Initializing", err) } s.Initializing = value case float64: @@ -122,7 +123,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Started", err) } s.Started = value case float64: @@ -137,7 +138,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/snapshotshardsstatus.go b/typedapi/types/snapshotshardsstatus.go index a74c2e6e02..d2d9848628 100644 --- a/typedapi/types/snapshotshardsstatus.go +++ b/typedapi/types/snapshotshardsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // SnapshotShardsStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotShardsStatus.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotShardsStatus.ts#L24-L27 type SnapshotShardsStatus struct { Stage shardsstatsstage.ShardsStatsStage `json:"stage"` Stats ShardsStatsSummary `json:"stats"` diff --git a/typedapi/types/snapshotsrecord.go b/typedapi/types/snapshotsrecord.go index db41b26398..d089bdf48f 100644 --- a/typedapi/types/snapshotsrecord.go +++ b/typedapi/types/snapshotsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SnapshotsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/snapshots/types.ts#L24-L96 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/snapshots/types.ts#L24-L96 type SnapshotsRecord struct { // Duration The time it took the snapshot process to complete, in time units. Duration Duration `json:"duration,omitempty"` @@ -86,23 +87,23 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "duration", "dur": if err := dec.Decode(&s.Duration); err != nil { - return err + return fmt.Errorf("%s | %w", "Duration", err) } case "end_epoch", "ete", "endEpoch": if err := dec.Decode(&s.EndEpoch); err != nil { - return err + return fmt.Errorf("%s | %w", "EndEpoch", err) } case "end_time", "eti", "endTime": if err := dec.Decode(&s.EndTime); err != nil { - return err + return fmt.Errorf("%s | %w", "EndTime", err) } case "failed_shards", "fs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FailedShards", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -114,7 +115,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "id", "snapshot": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "indices", "i": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,7 +139,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "reason", "r": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -150,7 +151,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "repository", "re", "repo": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -161,7 +162,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "start_epoch", "ste", "startEpoch": if err := dec.Decode(&s.StartEpoch); err != nil { - return err + return fmt.Errorf("%s | %w", "StartEpoch", err) } case "start_time", "sti", "startTime": @@ -180,14 +181,14 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { default: if err := localDec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } } case "status", "s": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -199,7 +200,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "successful_shards", "ss": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SuccessfulShards", 
err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -211,7 +212,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { case "total_shards", "ts": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalShards", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/snapshotstats.go b/typedapi/types/snapshotstats.go index b5f079a3e2..a888087718 100644 --- a/typedapi/types/snapshotstats.go +++ b/typedapi/types/snapshotstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SnapshotStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotStats.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotStats.ts#L23-L29 type SnapshotStats struct { Incremental FileCountSnapshotStats `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` @@ -55,27 +56,27 @@ func (s *SnapshotStats) UnmarshalJSON(data []byte) error { case "incremental": if err := dec.Decode(&s.Incremental); err != nil { - return err + return fmt.Errorf("%s | %w", "Incremental", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "time": if err := dec.Decode(&s.Time); err != nil { - return err + return fmt.Errorf("%s | %w", "Time", err) } case "time_in_millis": if err := dec.Decode(&s.TimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeInMillis", err) } case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } } diff --git a/typedapi/types/snowballanalyzer.go b/typedapi/types/snowballanalyzer.go index ed9c409be7..e8a44f3f58 100644 --- a/typedapi/types/snowballanalyzer.go +++ b/typedapi/types/snowballanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/snowballlanguage" @@ -31,7 +32,7 @@ import ( // SnowballAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L88-L93 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L88-L93 type SnowballAnalyzer struct { Language snowballlanguage.SnowballLanguage `json:"language"` Stopwords []string `json:"stopwords,omitempty"` @@ -56,7 +57,7 @@ func (s *SnowballAnalyzer) UnmarshalJSON(data []byte) error { case "language": if err := dec.Decode(&s.Language); err != nil { - return err + return fmt.Errorf("%s | %w", "Language", err) } case "stopwords": @@ -65,24 +66,24 @@ func (s *SnowballAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/snowballtokenfilter.go b/typedapi/types/snowballtokenfilter.go index c89deccb52..0abadf1bcc 100644 --- a/typedapi/types/snowballtokenfilter.go +++ b/typedapi/types/snowballtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/snowballlanguage" @@ -31,7 +32,7 @@ import ( // SnowballTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L309-L312 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L309-L312 type SnowballTokenFilter struct { Language snowballlanguage.SnowballLanguage `json:"language"` Type string `json:"type,omitempty"` @@ -55,17 +56,17 @@ func (s *SnowballTokenFilter) UnmarshalJSON(data []byte) error { case "language": if err := dec.Decode(&s.Language); err != nil { - return err + return fmt.Errorf("%s | %w", "Language", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/softdeletes.go b/typedapi/types/softdeletes.go index 6f84adebee..3d500e1297 100644 --- a/typedapi/types/softdeletes.go +++ b/typedapi/types/softdeletes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SoftDeletes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L50-L63 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L50-L63 type SoftDeletes struct { // Enabled Indicates whether soft deletes are enabled on the index. Enabled *bool `json:"enabled,omitempty"` @@ -66,7 +67,7 @@ func (s *SoftDeletes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -75,7 +76,7 @@ func (s *SoftDeletes) UnmarshalJSON(data []byte) error { case "retention_lease": if err := dec.Decode(&s.RetentionLease); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionLease", err) } } diff --git a/typedapi/types/sort.go b/typedapi/types/sort.go index b6b8b316e3..a99cae8a1d 100644 --- a/typedapi/types/sort.go +++ b/typedapi/types/sort.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Sort type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L99-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L99-L99 type Sort []SortCombinations diff --git a/typedapi/types/sortcombinations.go b/typedapi/types/sortcombinations.go index 8d821e8fe9..d33cbdab3e 100644 --- a/typedapi/types/sortcombinations.go +++ b/typedapi/types/sortcombinations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // SortOptions // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L93-L97 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L93-L97 type SortCombinations interface{} diff --git a/typedapi/types/sortoptions.go b/typedapi/types/sortoptions.go index 840f9e60ef..c6640890b8 100644 --- a/typedapi/types/sortoptions.go +++ b/typedapi/types/sortoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -27,7 +27,7 @@ import ( // SortOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/sort.ts#L82-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/sort.ts#L82-L91 type SortOptions struct { Doc_ *ScoreSort `json:"_doc,omitempty"` GeoDistance_ *GeoDistanceSort `json:"_geo_distance,omitempty"` diff --git a/typedapi/types/sortprocessor.go b/typedapi/types/sortprocessor.go index 6c785c995b..fa38d780f0 100644 --- a/typedapi/types/sortprocessor.go +++ b/typedapi/types/sortprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SortProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1075-L1091 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1075-L1091 type SortProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -74,7 +75,7 @@ func (s *SortProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,13 +86,13 @@ func (s *SortProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,7 +108,7 @@ func (s *SortProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -116,18 +117,18 @@ func (s *SortProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,7 +139,7 @@ func (s *SortProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/sourceconfig.go b/typedapi/types/sourceconfig.go index 1845fc52cd..5046240d90 100644 --- a/typedapi/types/sourceconfig.go +++ b/typedapi/types/sourceconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // bool // SourceFilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/SourceFilter.ts#L33-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/SourceFilter.ts#L33-L37 type SourceConfig interface{} diff --git a/typedapi/types/sourceconfigparam.go b/typedapi/types/sourceconfigparam.go index f95256eaf4..84a499054b 100644 --- a/typedapi/types/sourceconfigparam.go +++ b/typedapi/types/sourceconfigparam.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // bool // []string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/SourceFilter.ts#L39-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/SourceFilter.ts#L39-L45 type SourceConfigParam interface{} diff --git a/typedapi/types/sourcefield.go b/typedapi/types/sourcefield.go index ee70ee0302..c5846ed093 100644 --- a/typedapi/types/sourcefield.go +++ b/typedapi/types/sourcefield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SourceField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/meta-fields.ts#L58-L65 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/meta-fields.ts#L58-L65 type SourceField struct { Compress *bool `json:"compress,omitempty"` CompressThreshold *string `json:"compress_threshold,omitempty"` @@ -64,7 +65,7 @@ func (s *SourceField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compress", err) } s.Compress = &value case bool: @@ -74,7 +75,7 @@ func (s *SourceField) UnmarshalJSON(data []byte) error { case "compress_threshold": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CompressThreshold", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,7 +91,7 @@ func (s *SourceField) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -99,17 +100,17 @@ func (s *SourceField) UnmarshalJSON(data []byte) error { case "excludes": if err := dec.Decode(&s.Excludes); err != nil { - return err + return fmt.Errorf("%s | %w", "Excludes", err) } case "includes": if err := dec.Decode(&s.Includes); err != nil { - return err + return fmt.Errorf("%s | %w", "Includes", err) } case "mode": if err := dec.Decode(&s.Mode); err != nil { - return err + return fmt.Errorf("%s | %w", "Mode", err) } } diff --git a/typedapi/types/sourcefilter.go b/typedapi/types/sourcefilter.go index b29777fdd2..7ba64ebd63 100644 --- a/typedapi/types/sourcefilter.go +++ b/typedapi/types/sourcefilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SourceFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/SourceFilter.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/SourceFilter.ts#L23-L31 type SourceFilter struct { Excludes []string `json:"excludes,omitempty"` Includes []string `json:"includes,omitempty"` @@ -61,13 +62,13 @@ func (s *SourceFilter) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Excludes", err) } s.Excludes = append(s.Excludes, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Excludes); err != nil { - return err + return fmt.Errorf("%s | %w", "Excludes", err) } } @@ -77,13 +78,13 @@ func (s *SourceFilter) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Includes", err) } s.Includes = append(s.Includes, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Includes); err != nil { - return err + return fmt.Errorf("%s | %w", "Includes", err) } } diff --git a/typedapi/types/sourceonlyrepository.go b/typedapi/types/sourceonlyrepository.go new file mode 100644 index 0000000000..bcf94bb501 --- /dev/null +++ b/typedapi/types/sourceonlyrepository.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// SourceOnlyRepository type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L65-L68 +type SourceOnlyRepository struct { + Settings SourceOnlyRepositorySettings `json:"settings"` + Type string `json:"type,omitempty"` + Uuid *string `json:"uuid,omitempty"` +} + +func (s *SourceOnlyRepository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return fmt.Errorf("%s | %w", "Settings", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return fmt.Errorf("%s | %w", "Uuid", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s SourceOnlyRepository) MarshalJSON() ([]byte, error) { + type innerSourceOnlyRepository SourceOnlyRepository + tmp := innerSourceOnlyRepository{ + Settings: s.Settings, + Type: s.Type, + Uuid: s.Uuid, + } + + tmp.Type = "source" + + return json.Marshal(tmp) +} + +// NewSourceOnlyRepository returns a SourceOnlyRepository. +func NewSourceOnlyRepository() *SourceOnlyRepository { + r := &SourceOnlyRepository{} + + return r +} diff --git a/typedapi/types/sourceonlyrepositorysettings.go b/typedapi/types/sourceonlyrepositorysettings.go new file mode 100644 index 0000000000..88983c80e1 --- /dev/null +++ b/typedapi/types/sourceonlyrepositorysettings.go @@ -0,0 +1,141 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// SourceOnlyRepositorySettings type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotRepository.ts#L117-L124 +type SourceOnlyRepositorySettings struct { + ChunkSize ByteSize `json:"chunk_size,omitempty"` + Compress *bool `json:"compress,omitempty"` + DelegateType *string `json:"delegate_type,omitempty"` + MaxNumberOfSnapshots *int `json:"max_number_of_snapshots,omitempty"` + MaxRestoreBytesPerSec ByteSize `json:"max_restore_bytes_per_sec,omitempty"` + MaxSnapshotBytesPerSec ByteSize `json:"max_snapshot_bytes_per_sec,omitempty"` + ReadOnly *bool `json:"read_only,omitempty"` +} + +func (s *SourceOnlyRepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "chunk_size": + if err := dec.Decode(&s.ChunkSize); err != nil { + return fmt.Errorf("%s | %w", "ChunkSize", err) + } + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Compress", err) + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "delegate_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "DelegateType", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.DelegateType = &o + + case "max_number_of_snapshots": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "MaxNumberOfSnapshots", err) + } + s.MaxNumberOfSnapshots = &value + case float64: + f := int(v) + s.MaxNumberOfSnapshots = &f + } + + case "max_restore_bytes_per_sec": + if err := dec.Decode(&s.MaxRestoreBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxRestoreBytesPerSec", err) + } + + case "max_snapshot_bytes_per_sec": + if err := dec.Decode(&s.MaxSnapshotBytesPerSec); err != nil { + return fmt.Errorf("%s | %w", "MaxSnapshotBytesPerSec", err) + } + + case "read_only", "readonly": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "ReadOnly", err) + } + s.ReadOnly = &value + case bool: + s.ReadOnly = &v + } + + } + } + return nil +} + +// NewSourceOnlyRepositorySettings returns a SourceOnlyRepositorySettings. +func NewSourceOnlyRepositorySettings() *SourceOnlyRepositorySettings { + r := &SourceOnlyRepositorySettings{} + + return r +} diff --git a/typedapi/types/spancontainingquery.go b/typedapi/types/spancontainingquery.go index eebc330374..645290ffe7 100644 --- a/typedapi/types/spancontainingquery.go +++ b/typedapi/types/spancontainingquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanContainingQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L25-L36 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L25-L36 type SpanContainingQuery struct { // Big Can be any span query. // Matching spans from `big` that contain matches from `little` are returned. @@ -64,7 +65,7 @@ func (s *SpanContainingQuery) UnmarshalJSON(data []byte) error { case "big": if err := dec.Decode(&s.Big); err != nil { - return err + return fmt.Errorf("%s | %w", "Big", err) } case "boost": @@ -74,7 +75,7 @@ func (s *SpanContainingQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -85,13 +86,13 @@ func (s *SpanContainingQuery) UnmarshalJSON(data []byte) error { case "little": if err := dec.Decode(&s.Little); err != nil { - return err + return fmt.Errorf("%s | %w", "Little", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spanfieldmaskingquery.go b/typedapi/types/spanfieldmaskingquery.go index af05f12987..7d716c7602 100644 --- a/typedapi/types/spanfieldmaskingquery.go +++ b/typedapi/types/spanfieldmaskingquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanFieldMaskingQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L38-L41 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L38-L41 type SpanFieldMaskingQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -65,7 +66,7 @@ func (s *SpanFieldMaskingQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -76,18 +77,18 @@ func (s *SpanFieldMaskingQuery) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spanfirstquery.go b/typedapi/types/spanfirstquery.go index adb45651ab..825aa3fc67 100644 --- a/typedapi/types/spanfirstquery.go +++ b/typedapi/types/spanfirstquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanFirstQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L43-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L43-L52 type SpanFirstQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -67,7 +68,7 @@ func (s *SpanFirstQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -84,7 +85,7 @@ func (s *SpanFirstQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "End", err) } s.End = value case float64: @@ -94,13 +95,13 @@ func (s *SpanFirstQuery) UnmarshalJSON(data []byte) error { case "match": if err := dec.Decode(&s.Match); err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spangapquery.go b/typedapi/types/spangapquery.go index aec6d9d949..09d72d82dc 100644 --- a/typedapi/types/spangapquery.go +++ b/typedapi/types/spangapquery.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SpanGapQuery type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L54-L56 type SpanGapQuery map[string]int diff --git a/typedapi/types/spanmultitermquery.go b/typedapi/types/spanmultitermquery.go index bfa67c6990..ecad88c9a0 100644 --- a/typedapi/types/spanmultitermquery.go +++ b/typedapi/types/spanmultitermquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanMultiTermQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L58-L63 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L58-L63 type SpanMultiTermQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -66,7 +67,7 @@ func (s *SpanMultiTermQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -77,13 +78,13 @@ func (s *SpanMultiTermQuery) UnmarshalJSON(data []byte) error { case "match": if err := dec.Decode(&s.Match); err != nil { - return err + return fmt.Errorf("%s | %w", "Match", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spannearquery.go b/typedapi/types/spannearquery.go index cdbc2dfe01..66380f58da 100644 --- a/typedapi/types/spannearquery.go +++ b/typedapi/types/spannearquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanNearQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L65-L78 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L65-L78 type SpanNearQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -69,7 +70,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -80,7 +81,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { case "clauses": if err := dec.Decode(&s.Clauses); err != nil { - return err + return fmt.Errorf("%s | %w", "Clauses", err) } case "in_order": @@ -90,7 +91,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InOrder", err) } s.InOrder = &value case bool: @@ -100,7 +101,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -117,7 +118,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Slop", err) } s.Slop = &value case float64: diff --git a/typedapi/types/spannotquery.go b/typedapi/types/spannotquery.go index 2713be4cc0..4ca7ca46a8 100644 --- a/typedapi/types/spannotquery.go +++ b/typedapi/types/spannotquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanNotQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L80-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L80-L104 type SpanNotQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -77,7 +78,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -94,7 +95,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Dist", err) } s.Dist = &value case float64: @@ -104,12 +105,12 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case "exclude": if err := dec.Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } case "post": @@ -120,7 +121,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Post", err) } s.Post = &value case float64: @@ -136,7 +137,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Pre", err) } s.Pre = &value case float64: @@ -147,7 +148,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spanorquery.go b/typedapi/types/spanorquery.go index 4cbe68b8b2..0c22a433fa 100644 --- a/typedapi/types/spanorquery.go +++ b/typedapi/types/spanorquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanOrQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L106-L111 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L106-L111 type SpanOrQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -65,7 +66,7 @@ func (s *SpanOrQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -76,13 +77,13 @@ func (s *SpanOrQuery) UnmarshalJSON(data []byte) error { case "clauses": if err := dec.Decode(&s.Clauses); err != nil { - return err + return fmt.Errorf("%s | %w", "Clauses", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spanquery.go b/typedapi/types/spanquery.go index 64b2110ead..57687e2927 100644 --- a/typedapi/types/spanquery.go +++ b/typedapi/types/spanquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SpanQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L131-L170 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L131-L170 type SpanQuery struct { // FieldMaskingSpan Allows queries like `span_near` or `span_or` across different fields. FieldMaskingSpan *SpanFieldMaskingQuery `json:"field_masking_span,omitempty"` @@ -74,42 +75,42 @@ func (s *SpanQuery) UnmarshalJSON(data []byte) error { case "field_masking_span": if err := dec.Decode(&s.FieldMaskingSpan); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldMaskingSpan", err) } case "span_containing": if err := dec.Decode(&s.SpanContaining); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanContaining", err) } case "span_first": if err := dec.Decode(&s.SpanFirst); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanFirst", err) } case "span_gap": if err := dec.Decode(&s.SpanGap); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanGap", err) } case "span_multi": if err := dec.Decode(&s.SpanMulti); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanMulti", err) } case "span_near": if err := dec.Decode(&s.SpanNear); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanNear", err) } case "span_not": if err := dec.Decode(&s.SpanNot); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanNot", err) } case "span_or": if err := dec.Decode(&s.SpanOr); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanOr", err) } case "span_term": @@ -117,12 +118,12 @@ func (s *SpanQuery) UnmarshalJSON(data []byte) error { s.SpanTerm = make(map[string]SpanTermQuery, 0) } if err := dec.Decode(&s.SpanTerm); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanTerm", err) } case "span_within": if err := dec.Decode(&s.SpanWithin); err != nil { - return err + return fmt.Errorf("%s | %w", "SpanWithin", err) } } diff --git a/typedapi/types/spantermquery.go b/typedapi/types/spantermquery.go index 5646557d5d..0202a000b6 100644 --- a/typedapi/types/spantermquery.go +++ b/typedapi/types/spantermquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanTermQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L113-L116 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L113-L116 type SpanTermQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
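For context, the generated unmarshalers all share the same token-loop shape: read a key, dispatch on it, decode the value, and (with this change) wrap any decode failure with the field name. A simplified sketch of that shape, using an invented exampleQuery type rather than a real generated struct:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

type exampleQuery struct {
	Boost   *float32
	Clauses []string
}

func (s *exampleQuery) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	if _, err := dec.Token(); err != nil { // consume the opening '{'
		return err
	}
	for {
		t, err := dec.Token() // next object key, or '}' then io.EOF
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "boost":
			if err := dec.Decode(&s.Boost); err != nil {
				return fmt.Errorf("%s | %w", "Boost", err)
			}
		case "clauses":
			if err := dec.Decode(&s.Clauses); err != nil {
				return fmt.Errorf("%s | %w", "Clauses", err)
			}
		}
	}
	return nil
}

func main() {
	var q exampleQuery
	// Prints: Boost | json: cannot unmarshal string into Go value of type float32
	fmt.Println(json.Unmarshal([]byte(`{"clauses":["a","b"],"boost":"oops"}`), &q))
}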
@@ -73,7 +74,7 @@ func (s *SpanTermQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -85,7 +86,7 @@ func (s *SpanTermQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -97,7 +98,7 @@ func (s *SpanTermQuery) UnmarshalJSON(data []byte) error { case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/spanwithinquery.go b/typedapi/types/spanwithinquery.go index 43dc9fb91b..cfd527767a 100644 --- a/typedapi/types/spanwithinquery.go +++ b/typedapi/types/spanwithinquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SpanWithinQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/span.ts#L118-L129 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/span.ts#L118-L129 type SpanWithinQuery struct { // Big Can be any span query. // Matching spans from `little` that are enclosed within `big` are returned. @@ -64,7 +65,7 @@ func (s *SpanWithinQuery) UnmarshalJSON(data []byte) error { case "big": if err := dec.Decode(&s.Big); err != nil { - return err + return fmt.Errorf("%s | %w", "Big", err) } case "boost": @@ -74,7 +75,7 @@ func (s *SpanWithinQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -85,13 +86,13 @@ func (s *SpanWithinQuery) UnmarshalJSON(data []byte) error { case "little": if err := dec.Decode(&s.Little); err != nil { - return err + return fmt.Errorf("%s | %w", "Little", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/sparseembeddingresult.go b/typedapi/types/sparseembeddingresult.go index 96aa9efd92..7e1790f100 100644 --- a/typedapi/types/sparseembeddingresult.go +++ b/typedapi/types/sparseembeddingresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SparseEmbeddingResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Results.ts#L35-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Results.ts#L35-L37 type SparseEmbeddingResult struct { Embedding SparseVector `json:"embedding"` } @@ -51,7 +52,7 @@ func (s *SparseEmbeddingResult) UnmarshalJSON(data []byte) error { case "embedding": if err := dec.Decode(&s.Embedding); err != nil { - return err + return fmt.Errorf("%s | %w", "Embedding", err) } } diff --git a/typedapi/types/sparsevector.go b/typedapi/types/sparsevector.go index 16acb2472f..ca13c02954 100644 --- a/typedapi/types/sparsevector.go +++ b/typedapi/types/sparsevector.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SparseVector type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Results.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Results.ts#L23-L27 type SparseVector map[string]float32 diff --git a/typedapi/types/sparsevectorproperty.go b/typedapi/types/sparsevectorproperty.go index 33d1eee3b0..ed2d13dd2a 100644 --- a/typedapi/types/sparsevectorproperty.go +++ b/typedapi/types/sparsevectorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SparseVectorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L193-L195 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L194-L196 type SparseVectorProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -60,7 +61,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -378,7 +379,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -391,7 +392,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -703,7 +704,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/splitprocessor.go b/typedapi/types/splitprocessor.go index 11487b6288..b1669abc94 100644 --- a/typedapi/types/splitprocessor.go +++ b/typedapi/types/splitprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SplitProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1093-L1118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1093-L1118 type SplitProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
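The SplitProcessor hunk that follows also shows the lenient decode used for flags such as ignore_failure, which may arrive either as a JSON boolean or as the string "true"/"false". A hypothetical helper sketching that pattern together with the new wrapping (the helper name is invented; the generated code inlines this logic per field):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeLenientBool accepts a JSON boolean or a quoted "true"/"false" and
// wraps any parse failure with the Go field name, mirroring the generated code.
func decodeLenientBool(raw json.RawMessage, field string) (*bool, error) {
	var t interface{}
	if err := json.Unmarshal(raw, &t); err != nil {
		return nil, fmt.Errorf("%s | %w", field, err)
	}
	switch v := t.(type) {
	case string:
		value, err := strconv.ParseBool(v)
		if err != nil {
			return nil, fmt.Errorf("%s | %w", field, err)
		}
		return &value, nil
	case bool:
		return &v, nil
	}
	return nil, fmt.Errorf("%s | unexpected JSON type %T", field, t)
}

func main() {
	for _, raw := range []string{`true`, `"false"`, `"maybe"`} {
		v, err := decodeLenientBool(json.RawMessage(raw), "IgnoreFailure")
		if err != nil {
			fmt.Println(err) // e.g. IgnoreFailure | strconv.ParseBool: parsing "maybe": invalid syntax
			continue
		}
		fmt.Println(*v)
	}
}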
@@ -76,7 +77,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -87,13 +88,13 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -123,7 +124,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -132,7 +133,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "preserve_trailing": @@ -142,7 +143,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveTrailing", err) } s.PreserveTrailing = &value case bool: @@ -152,7 +153,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -164,7 +165,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -175,7 +176,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/sql.go b/typedapi/types/sql.go index 9be9b9ce20..86c22dfc25 100644 --- a/typedapi/types/sql.go +++ b/typedapi/types/sql.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Sql type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L386-L389 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L386-L389 type Sql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -60,7 +61,7 @@ func (s *Sql) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -74,7 +75,7 @@ func (s *Sql) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -86,7 +87,7 @@ func (s *Sql) UnmarshalJSON(data []byte) error { s.Features = make(map[string]int, 0) } if err := dec.Decode(&s.Features); err != nil { - return err + return fmt.Errorf("%s | %w", "Features", err) } case "queries": @@ -94,7 +95,7 @@ func (s *Sql) UnmarshalJSON(data []byte) error { s.Queries = make(map[string]XpackQuery, 0) } if err := dec.Decode(&s.Queries); err != nil { - return err + return fmt.Errorf("%s | %w", "Queries", err) } } diff --git a/typedapi/types/ssl.go b/typedapi/types/ssl.go index 1a680e5f48..8eb59aa6fa 100644 --- a/typedapi/types/ssl.go +++ b/typedapi/types/ssl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Ssl type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L391-L394 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L391-L394 type Ssl struct { Http FeatureToggle `json:"http"` Transport FeatureToggle `json:"transport"` diff --git a/typedapi/types/standardanalyzer.go b/typedapi/types/standardanalyzer.go index a8412c6808..7d4d1e8203 100644 --- a/typedapi/types/standardanalyzer.go +++ b/typedapi/types/standardanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StandardAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L95-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L95-L99 type StandardAnalyzer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Stopwords []string `json:"stopwords,omitempty"` @@ -60,7 +61,7 @@ func (s *StandardAnalyzer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenLength", err) } s.MaxTokenLength = &value case float64: @@ -74,19 +75,19 @@ func (s *StandardAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/standarddeviationbounds.go b/typedapi/types/standarddeviationbounds.go index 40b858e000..32526fbdef 100644 --- a/typedapi/types/standarddeviationbounds.go +++ b/typedapi/types/standarddeviationbounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // StandardDeviationBounds type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L260-L267 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L260-L267 type StandardDeviationBounds struct { Lower Float64 `json:"lower,omitempty"` LowerPopulation Float64 `json:"lower_population,omitempty"` @@ -56,32 +57,32 @@ func (s *StandardDeviationBounds) UnmarshalJSON(data []byte) error { case "lower": if err := dec.Decode(&s.Lower); err != nil { - return err + return fmt.Errorf("%s | %w", "Lower", err) } case "lower_population": if err := dec.Decode(&s.LowerPopulation); err != nil { - return err + return fmt.Errorf("%s | %w", "LowerPopulation", err) } case "lower_sampling": if err := dec.Decode(&s.LowerSampling); err != nil { - return err + return fmt.Errorf("%s | %w", "LowerSampling", err) } case "upper": if err := dec.Decode(&s.Upper); err != nil { - return err + return fmt.Errorf("%s | %w", "Upper", err) } case "upper_population": if err := dec.Decode(&s.UpperPopulation); err != nil { - return err + return fmt.Errorf("%s | %w", "UpperPopulation", err) } case "upper_sampling": if err := dec.Decode(&s.UpperSampling); err != nil { - return err + return fmt.Errorf("%s | %w", "UpperSampling", err) } } diff --git a/typedapi/types/standarddeviationboundsasstring.go b/typedapi/types/standarddeviationboundsasstring.go index 4cbd0b0d75..088420e690 100644 --- a/typedapi/types/standarddeviationboundsasstring.go +++ b/typedapi/types/standarddeviationboundsasstring.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StandardDeviationBoundsAsString type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L269-L276 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L269-L276 type StandardDeviationBoundsAsString struct { Lower string `json:"lower"` LowerPopulation string `json:"lower_population"` @@ -58,7 +59,7 @@ func (s *StandardDeviationBoundsAsString) UnmarshalJSON(data []byte) error { case "lower": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Lower", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *StandardDeviationBoundsAsString) UnmarshalJSON(data []byte) error { case "lower_population": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LowerPopulation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *StandardDeviationBoundsAsString) UnmarshalJSON(data []byte) error { case "lower_sampling": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LowerSampling", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,7 +95,7 @@ func (s *StandardDeviationBoundsAsString) UnmarshalJSON(data []byte) error { case "upper": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Upper", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -106,7 +107,7 @@ func (s *StandardDeviationBoundsAsString) UnmarshalJSON(data []byte) error { case "upper_population": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UpperPopulation", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *StandardDeviationBoundsAsString) UnmarshalJSON(data []byte) error { case "upper_sampling": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UpperSampling", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/standardtokenizer.go b/typedapi/types/standardtokenizer.go index 51d63cfa98..eaf01d90a1 100644 --- a/typedapi/types/standardtokenizer.go +++ b/typedapi/types/standardtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StandardTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L105-L108 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L105-L108 type StandardTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` @@ -60,7 +61,7 @@ func (s *StandardTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenLength", err) } s.MaxTokenLength = &value case float64: @@ -70,12 +71,12 @@ func (s *StandardTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/statistics.go b/typedapi/types/statistics.go index 74b5503d97..707eb11ec3 100644 --- a/typedapi/types/statistics.go +++ b/typedapi/types/statistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Statistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/slm/_types/SnapshotLifecycle.ts#L51-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/slm/_types/SnapshotLifecycle.ts#L51-L74 type Statistics struct { Policy *string `json:"policy,omitempty"` RetentionDeletionTime Duration `json:"retention_deletion_time,omitempty"` @@ -61,17 +62,17 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case "policy": if err := dec.Decode(&s.Policy); err != nil { - return err + return fmt.Errorf("%s | %w", "Policy", err) } case "retention_deletion_time": if err := dec.Decode(&s.RetentionDeletionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionDeletionTime", err) } case "retention_deletion_time_millis": if err := dec.Decode(&s.RetentionDeletionTimeMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionDeletionTimeMillis", err) } case "retention_failed": @@ -81,7 +82,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionFailed", err) } s.RetentionFailed = &value case float64: @@ -96,7 +97,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionRuns", err) } s.RetentionRuns = &value case float64: @@ -111,7 +112,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionTimedOut", err) } s.RetentionTimedOut = &value case float64: @@ -126,7 +127,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := 
strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSnapshotDeletionFailures", err) } s.TotalSnapshotDeletionFailures = &value case float64: @@ -141,7 +142,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSnapshotsDeleted", err) } s.TotalSnapshotsDeleted = &value case float64: @@ -156,7 +157,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSnapshotsFailed", err) } s.TotalSnapshotsFailed = &value case float64: @@ -171,7 +172,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalSnapshotsTaken", err) } s.TotalSnapshotsTaken = &value case float64: diff --git a/typedapi/types/stats.go b/typedapi/types/stats.go index 47cc4dc7c6..d9ab25acaa 100644 --- a/typedapi/types/stats.go +++ b/typedapi/types/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Stats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L30-L114 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L30-L114 type Stats struct { // AdaptiveSelection Statistics about adaptive replica selection. 
AdaptiveSelection map[string]AdaptiveSelection `json:"adaptive_selection,omitempty"` @@ -103,7 +104,7 @@ func (s *Stats) UnmarshalJSON(data []byte) error { s.AdaptiveSelection = make(map[string]AdaptiveSelection, 0) } if err := dec.Decode(&s.AdaptiveSelection); err != nil { - return err + return fmt.Errorf("%s | %w", "AdaptiveSelection", err) } case "attributes": @@ -111,7 +112,7 @@ func (s *Stats) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "breakers": @@ -119,42 +120,42 @@ func (s *Stats) UnmarshalJSON(data []byte) error { s.Breakers = make(map[string]Breaker, 0) } if err := dec.Decode(&s.Breakers); err != nil { - return err + return fmt.Errorf("%s | %w", "Breakers", err) } case "discovery": if err := dec.Decode(&s.Discovery); err != nil { - return err + return fmt.Errorf("%s | %w", "Discovery", err) } case "fs": if err := dec.Decode(&s.Fs); err != nil { - return err + return fmt.Errorf("%s | %w", "Fs", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "http": if err := dec.Decode(&s.Http); err != nil { - return err + return fmt.Errorf("%s | %w", "Http", err) } case "indexing_pressure": if err := dec.Decode(&s.IndexingPressure); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexingPressure", err) } case "indices": if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "ingest": if err := dec.Decode(&s.Ingest); err != nil { - return err + return fmt.Errorf("%s | %w", "Ingest", err) } case "ip": @@ -163,44 +164,44 @@ func (s *Stats) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } s.Ip = append(s.Ip, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } } case "jvm": if err := dec.Decode(&s.Jvm); err != nil { - return err + return fmt.Errorf("%s | %w", "Jvm", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "os": if err := dec.Decode(&s.Os); err != nil { - return err + return fmt.Errorf("%s | %w", "Os", err) } case "process": if err := dec.Decode(&s.Process); err != nil { - return err + return fmt.Errorf("%s | %w", "Process", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "script": if err := dec.Decode(&s.Script); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } case "script_cache": @@ -215,14 +216,14 @@ func (s *Stats) UnmarshalJSON(data []byte) error { o := NewScriptCache() err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptCache", err) } s.ScriptCache[key] = append(s.ScriptCache[key], *o) default: o := []ScriptCache{} err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptCache", err) } s.ScriptCache[key] = o } @@ -233,7 +234,7 @@ func (s *Stats) UnmarshalJSON(data []byte) error { s.ThreadPool = make(map[string]ThreadCount, 0) } if err := dec.Decode(&s.ThreadPool); err != nil { - return err + 
return fmt.Errorf("%s | %w", "ThreadPool", err) } case "timestamp": @@ -243,7 +244,7 @@ func (s *Stats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } s.Timestamp = &value case float64: @@ -253,12 +254,12 @@ func (s *Stats) UnmarshalJSON(data []byte) error { case "transport": if err := dec.Decode(&s.Transport); err != nil { - return err + return fmt.Errorf("%s | %w", "Transport", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/statsaggregate.go b/typedapi/types/statsaggregate.go index 77698e429b..bea35b37e7 100644 --- a/typedapi/types/statsaggregate.go +++ b/typedapi/types/statsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L240-L255 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L240-L255 type StatsAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` @@ -61,13 +62,13 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { case "avg": if err := dec.Decode(&s.Avg); err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } case "avg_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -93,13 +94,13 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "max_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -110,18 +111,18 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min": if err := dec.Decode(&s.Min); err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } case "min_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -137,7 +138,7 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sum", err) } f := Float64(value) s.Sum = f @@ -149,7 +150,7 @@ func (s 
*StatsAggregate) UnmarshalJSON(data []byte) error { case "sum_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SumAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/statsaggregation.go b/typedapi/types/statsaggregation.go index d11bf62535..ee4e22a91e 100644 --- a/typedapi/types/statsaggregation.go +++ b/typedapi/types/statsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L282-L282 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L282-L282 type StatsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *StatsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *StatsAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *StatsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *StatsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *StatsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/statsbucketaggregate.go b/typedapi/types/statsbucketaggregate.go index 03fbd75392..a27b82e8a9 100644 --- a/typedapi/types/statsbucketaggregate.go +++ b/typedapi/types/statsbucketaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StatsBucketAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L257-L258 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L257-L258 type StatsBucketAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` @@ -61,13 +62,13 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { case "avg": if err := dec.Decode(&s.Avg); err != nil { - return err + return fmt.Errorf("%s | %w", "Avg", err) } case "avg_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -93,13 +94,13 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { case "max": if err := dec.Decode(&s.Max); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } case "max_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -110,18 +111,18 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min": if err := dec.Decode(&s.Min); err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } case "min_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -137,7 +138,7 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Sum", err) } f := Float64(value) s.Sum = f @@ -149,7 +150,7 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { case "sum_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SumAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/statsbucketaggregation.go b/typedapi/types/statsbucketaggregation.go index 6fcc9e7a71..3c6999e906 100644 --- a/typedapi/types/statsbucketaggregation.go +++ b/typedapi/types/statsbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // StatsBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L369-L369 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L369-L369 type StatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/status.go b/typedapi/types/status.go index 069a892aa3..d75cd44b25 100644 --- a/typedapi/types/status.go +++ b/typedapi/types/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Status type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/snapshot/_types/SnapshotStatus.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/snapshot/_types/SnapshotStatus.ts#L26-L35 type Status struct { IncludeGlobalState bool `json:"include_global_state"` Indices map[string]SnapshotIndexStats `json:"indices"` @@ -64,7 +65,7 @@ func (s *Status) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IncludeGlobalState", err) } s.IncludeGlobalState = value case bool: @@ -76,13 +77,13 @@ func (s *Status) UnmarshalJSON(data []byte) error { s.Indices = make(map[string]SnapshotIndexStats, 0) } if err := dec.Decode(&s.Indices); err != nil { - return err + return fmt.Errorf("%s | %w", "Indices", err) } case "repository": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Repository", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,13 +94,13 @@ func (s *Status) UnmarshalJSON(data []byte) error { case "shards_stats": if err := dec.Decode(&s.ShardsStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ShardsStats", err) } case "snapshot": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Snapshot", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -111,7 +112,7 @@ func (s *Status) UnmarshalJSON(data []byte) error { case "state": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -122,12 +123,12 @@ func (s *Status) UnmarshalJSON(data []byte) error { case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "uuid": if err := dec.Decode(&s.Uuid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uuid", err) } } diff --git a/typedapi/types/stemmeroverridetokenfilter.go b/typedapi/types/stemmeroverridetokenfilter.go index f3da8d3131..402e0f6ff0 100644 --- a/typedapi/types/stemmeroverridetokenfilter.go +++ b/typedapi/types/stemmeroverridetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StemmerOverrideTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L314-L318 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L314-L318 type StemmerOverrideTokenFilter struct { Rules []string `json:"rules,omitempty"` RulesPath *string `json:"rules_path,omitempty"` @@ -55,13 +56,13 @@ func (s *StemmerOverrideTokenFilter) UnmarshalJSON(data []byte) error { case "rules": if err := dec.Decode(&s.Rules); err != nil { - return err + return fmt.Errorf("%s | %w", "Rules", err) } case "rules_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RulesPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,12 +73,12 @@ func (s *StemmerOverrideTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/stemmertokenfilter.go b/typedapi/types/stemmertokenfilter.go index 7bc5d30420..d827a44377 100644 --- a/typedapi/types/stemmertokenfilter.go +++ b/typedapi/types/stemmertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StemmerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L320-L324 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L320-L324 type StemmerTokenFilter struct { Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` @@ -55,7 +56,7 @@ func (s *StemmerTokenFilter) UnmarshalJSON(data []byte) error { case "language", "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Language", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,12 +67,12 @@ func (s *StemmerTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/stepkey.go b/typedapi/types/stepkey.go index 48f27361b2..20116943d3 100644 --- a/typedapi/types/stepkey.go +++ b/typedapi/types/stepkey.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StepKey type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ilm/move_to_step/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ilm/move_to_step/types.ts#L20-L24 type StepKey struct { Action string `json:"action"` Name string `json:"name"` @@ -55,7 +56,7 @@ func (s *StepKey) UnmarshalJSON(data []byte) error { case "action": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *StepKey) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -79,7 +80,7 @@ func (s *StepKey) UnmarshalJSON(data []byte) error { case "phase": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Phase", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/stopanalyzer.go b/typedapi/types/stopanalyzer.go index 4a5925bd99..9e017b2236 100644 --- a/typedapi/types/stopanalyzer.go +++ b/typedapi/types/stopanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StopAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L101-L106 type StopAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` @@ -59,20 +60,20 @@ func (s *StopAnalyzer) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "stopwords_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StopwordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,12 +84,12 @@ func (s *StopAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/stoptokenfilter.go b/typedapi/types/stoptokenfilter.go index d4f0422ac1..89294c6931 100644 --- a/typedapi/types/stoptokenfilter.go +++ b/typedapi/types/stoptokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StopTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L97-L103 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L97-L103 type StopTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` RemoveTrailing *bool `json:"remove_trailing,omitempty"` @@ -62,7 +63,7 @@ func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreCase", err) } s.IgnoreCase = &value case bool: @@ -76,7 +77,7 @@ func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RemoveTrailing", err) } s.RemoveTrailing = &value case bool: @@ -89,20 +90,20 @@ func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } s.Stopwords = append(s.Stopwords, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { - return err + return fmt.Errorf("%s | %w", "Stopwords", err) } } case "stopwords_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StopwordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,12 +114,12 @@ func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/stopwords.go b/typedapi/types/stopwords.go index 896df7af8a..ed2480216b 100644 --- a/typedapi/types/stopwords.go +++ b/typedapi/types/stopwords.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // StopWords type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/StopWords.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/StopWords.ts#L20-L26 type StopWords []string diff --git a/typedapi/types/storage.go b/typedapi/types/storage.go index 7adc9ad49c..4973680406 100644 --- a/typedapi/types/storage.go +++ b/typedapi/types/storage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // Storage type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L494-L503 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L498-L507 type Storage struct { // AllowMmap You can restrict the use of the mmapfs and the related hybridfs store type // via the setting node.store.allow_mmap. @@ -67,7 +68,7 @@ func (s *Storage) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowMmap", err) } s.AllowMmap = &value case bool: @@ -76,7 +77,7 @@ func (s *Storage) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/storedscript.go b/typedapi/types/storedscript.go index ee0f45f0b3..f92d528e79 100644 --- a/typedapi/types/storedscript.go +++ b/typedapi/types/storedscript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // StoredScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Scripting.ts#L47-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Scripting.ts#L47-L57 type StoredScript struct { // Lang Specifies the language the script is written in. Lang scriptlanguage.ScriptLanguage `json:"lang"` @@ -58,7 +59,7 @@ func (s *StoredScript) UnmarshalJSON(data []byte) error { case "lang": if err := dec.Decode(&s.Lang); err != nil { - return err + return fmt.Errorf("%s | %w", "Lang", err) } case "options": @@ -66,13 +67,13 @@ func (s *StoredScript) UnmarshalJSON(data []byte) error { s.Options = make(map[string]string, 0) } if err := dec.Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/storedscriptid.go b/typedapi/types/storedscriptid.go index 947a0519f2..ad794eee65 100644 --- a/typedapi/types/storedscriptid.go +++ b/typedapi/types/storedscriptid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // StoredScriptId type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Scripting.ts#L81-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Scripting.ts#L81-L86 type StoredScriptId struct { // Id The `id` for a stored script. Id string `json:"id"` @@ -55,7 +56,7 @@ func (s *StoredScriptId) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "params": @@ -63,7 +64,7 @@ func (s *StoredScriptId) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } } diff --git a/typedapi/types/storestats.go b/typedapi/types/storestats.go index 6007b34dc8..835574c3d1 100644 --- a/typedapi/types/storestats.go +++ b/typedapi/types/storestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StoreStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L368-L395 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L368-L395 type StoreStats struct { // Reserved A prediction of how much larger the shard stores will eventually grow due to // ongoing peer recoveries, restoring snapshots, and similar activities. 
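The change repeated throughout this diff is mechanical: every "return err" inside a generated UnmarshalJSON is replaced by "return fmt.Errorf("%s | %w", "<FieldName>", err)", so a decode failure names the Go struct field it came from while %w keeps the original error reachable for errors.Is and errors.As. A minimal sketch of the same convention, using a hypothetical shardStore type rather than the generated StoreStats (the field, JSON input, and names below are illustrative only, not part of this patch):

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

// shardStore is a hypothetical stand-in for a generated type such as StoreStats:
// size_in_bytes may arrive either as a JSON number or as a quoted string.
type shardStore struct {
	SizeInBytes int64 `json:"size_in_bytes"`
}

func (s *shardStore) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	msg, ok := raw["size_in_bytes"]
	if !ok {
		return nil
	}
	var v interface{}
	if err := json.Unmarshal(msg, &v); err != nil {
		return fmt.Errorf("%s | %w", "SizeInBytes", err)
	}
	switch t := v.(type) {
	case string:
		n, err := strconv.ParseInt(t, 10, 64)
		if err != nil {
			// Same convention as the generated code: Go field name, then %w.
			return fmt.Errorf("%s | %w", "SizeInBytes", err)
		}
		s.SizeInBytes = n
	case float64:
		s.SizeInBytes = int64(t)
	}
	return nil
}

func main() {
	var s shardStore
	err := json.Unmarshal([]byte(`{"size_in_bytes":"not-a-number"}`), &s)
	fmt.Println(err) // SizeInBytes | strconv.ParseInt: parsing "not-a-number": invalid syntax

	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true: %w keeps the original cause unwrappable
}

The net effect for callers is purely additive: error strings gain a field prefix, and any existing errors.Is / errors.As checks against the underlying cause keep working.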
@@ -70,7 +71,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { case "reserved": if err := dec.Decode(&s.Reserved); err != nil { - return err + return fmt.Errorf("%s | %w", "Reserved", err) } case "reserved_in_bytes": @@ -80,7 +81,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ReservedInBytes", err) } s.ReservedInBytes = value case float64: @@ -90,7 +91,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } case "size_in_bytes": @@ -100,7 +101,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: @@ -110,7 +111,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { case "total_data_set_size": if err := dec.Decode(&s.TotalDataSetSize); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDataSetSize", err) } case "total_data_set_size_in_bytes": @@ -120,7 +121,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDataSetSizeInBytes", err) } s.TotalDataSetSizeInBytes = &value case float64: diff --git a/typedapi/types/stringifiedboolean.go b/typedapi/types/stringifiedboolean.go index f301b345a7..03393ac15a 100644 --- a/typedapi/types/stringifiedboolean.go +++ b/typedapi/types/stringifiedboolean.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // bool // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/Stringified.ts#L20-L27 type Stringifiedboolean interface{} diff --git a/typedapi/types/stringifiedepochtimeunitmillis.go b/typedapi/types/stringifiedepochtimeunitmillis.go index b469328349..fe021e2546 100644 --- a/typedapi/types/stringifiedepochtimeunitmillis.go +++ b/typedapi/types/stringifiedepochtimeunitmillis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/Stringified.ts#L20-L27 type StringifiedEpochTimeUnitMillis interface{} diff --git a/typedapi/types/stringifiedepochtimeunitseconds.go b/typedapi/types/stringifiedepochtimeunitseconds.go index 7699848941..4d36c22cbe 100644 --- a/typedapi/types/stringifiedepochtimeunitseconds.go +++ b/typedapi/types/stringifiedepochtimeunitseconds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/Stringified.ts#L20-L27 type StringifiedEpochTimeUnitSeconds interface{} diff --git a/typedapi/types/stringifiedinteger.go b/typedapi/types/stringifiedinteger.go index fee90f3670..aacde7f833 100644 --- a/typedapi/types/stringifiedinteger.go +++ b/typedapi/types/stringifiedinteger.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/Stringified.ts#L20-L27 type Stringifiedinteger interface{} diff --git a/typedapi/types/stringifiedversionnumber.go b/typedapi/types/stringifiedversionnumber.go index 157533ea71..1d2e5c5f58 100644 --- a/typedapi/types/stringifiedversionnumber.go +++ b/typedapi/types/stringifiedversionnumber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_spec_utils/Stringified.ts#L20-L27 type StringifiedVersionNumber interface{} diff --git a/typedapi/types/stringraretermsaggregate.go b/typedapi/types/stringraretermsaggregate.go index 766615b955..cec062ea24 100644 --- a/typedapi/types/stringraretermsaggregate.go +++ b/typedapi/types/stringraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // StringRareTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L443-L447 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L443-L447 type StringRareTermsAggregate struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *StringRareTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]StringRareTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []StringRareTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/stringraretermsbucket.go b/typedapi/types/stringraretermsbucket.go index 5d8c10d55b..bb7949b8a2 100644 --- a/typedapi/types/stringraretermsbucket.go +++ b/typedapi/types/stringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // StringRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L449-L451 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L449-L451 type StringRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -72,7 +72,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { case "key": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -95,490 +95,490 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := 
NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o 
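The long switch above (and the matching one in StringTermsBucket later in this diff) dispatches sub-aggregations from Elasticsearch's typed_keys response format, where each key has the shape "<type>#<name>" and the prefix selects the concrete aggregate struct; the only change here is that every decode failure is now wrapped under the "Aggregations" label. A rough sketch of that dispatch shape, trimmed to two hypothetical stand-in types (minAggregate and stringTermsAggregate below are illustrative, not the generated types):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Hypothetical, trimmed-down stand-ins for two of the generated aggregate types.
type minAggregate struct {
	Value float64 `json:"value"`
}

type stringTermsAggregate struct {
	Buckets []struct {
		Key      string `json:"key"`
		DocCount int64  `json:"doc_count"`
	} `json:"buckets"`
}

// decodeTypedKeys mimics the shape of the generated dispatch: response keys look like
// "min#lowest_price" or "sterms#by_category", the prefix picks the concrete type, and
// any decode failure is wrapped under the "Aggregations" label.
func decodeTypedKeys(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{}, len(raw))
	for key, msg := range raw {
		elems := strings.Split(key, "#")
		if len(elems) != 2 {
			continue
		}
		switch elems[0] {
		case "min":
			o := new(minAggregate)
			if err := json.Unmarshal(msg, o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		case "sterms":
			o := new(stringTermsAggregate)
			if err := json.Unmarshal(msg, o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		default: // unknown aggregate types fall back to a generic map, as the generated code does
			var o map[string]interface{}
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		}
	}
	return out, nil
}

func main() {
	raw := map[string]json.RawMessage{
		"min#lowest_price": json.RawMessage(`{"value": 9.99}`),
	}
	aggs, err := decodeTypedKeys(raw)
	fmt.Println(aggs["lowest_price"].(*minAggregate).Value, err) // 9.99 <nil>
}

Because the wrapping only annotates the sub-aggregation label and not the typed key itself, a failure in any branch of the real switch surfaces as "Aggregations | <original decode error>".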
case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - 
return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -588,7 +588,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { } else { 
o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/stringstatsaggregate.go b/typedapi/types/stringstatsaggregate.go index d0c143d95e..70e62895d1 100644 --- a/typedapi/types/stringstatsaggregate.go +++ b/typedapi/types/stringstatsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StringStatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L693-L704 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L693-L704 type StringStatsAggregate struct { AvgLength Float64 `json:"avg_length,omitempty"` AvgLengthAsString *string `json:"avg_length_as_string,omitempty"` @@ -61,13 +62,13 @@ func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { case "avg_length": if err := dec.Decode(&s.AvgLength); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgLength", err) } case "avg_length_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AvgLengthAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -93,23 +94,23 @@ func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { case "distribution": if err := dec.Decode(&s.Distribution); err != nil { - return err + return fmt.Errorf("%s | %w", "Distribution", err) } case "entropy": if err := dec.Decode(&s.Entropy); err != nil { - return err + return fmt.Errorf("%s | %w", "Entropy", err) } case "max_length": if err := dec.Decode(&s.MaxLength); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxLength", err) } case "max_length_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxLengthAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,18 +121,18 @@ func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_length": if err := dec.Decode(&s.MinLength); err != nil { - return err + return fmt.Errorf("%s | %w", "MinLength", err) } case "min_length_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinLengthAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/stringstatsaggregation.go b/typedapi/types/stringstatsaggregation.go index a4cd382f94..5611549add 100644 --- a/typedapi/types/stringstatsaggregation.go +++ b/typedapi/types/stringstatsaggregation.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StringStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L284-L290 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L284-L290 type StringStatsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -59,18 +60,18 @@ func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -79,7 +80,7 @@ func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -88,7 +89,7 @@ func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -96,7 +97,7 @@ func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -110,7 +111,7 @@ func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShowDistribution", err) } s.ShowDistribution = &value case bool: diff --git a/typedapi/types/stringtermsaggregate.go b/typedapi/types/stringtermsaggregate.go index 77aa3e6c91..e253b7c63d 100644 --- a/typedapi/types/stringtermsaggregate.go +++ b/typedapi/types/stringtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StringTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L384-L389 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L384-L389 type StringTermsAggregate struct { Buckets BucketsStringTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]StringTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []StringTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/stringtermsbucket.go b/typedapi/types/stringtermsbucket.go index 62fedb580a..b56f428fa3 100644 --- a/typedapi/types/stringtermsbucket.go +++ b/typedapi/types/stringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // StringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L395-L397 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L395-L397 type StringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -62,7 +62,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -77,7 +77,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountError", err) } s.DocCountError = &value case float64: @@ -87,7 +87,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { case "key": if err := dec.Decode(&s.Key); err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } default: @@ -104,490 +104,490 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() 
if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := 
NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + 
return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return 
fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -597,7 +597,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/stupidbackoffsmoothingmodel.go b/typedapi/types/stupidbackoffsmoothingmodel.go index 22a30f201a..c172dcfb6a 100644 --- a/typedapi/types/stupidbackoffsmoothingmodel.go +++ b/typedapi/types/stupidbackoffsmoothingmodel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // StupidBackoffSmoothingModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L460-L465 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L460-L465 type StupidBackoffSmoothingModel struct { // Discount A constant factor that the lower order n-gram model is discounted by. Discount Float64 `json:"discount"` @@ -58,7 +59,7 @@ func (s *StupidBackoffSmoothingModel) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Discount", err) } f := Float64(value) s.Discount = f diff --git a/typedapi/types/suggest.go b/typedapi/types/suggest.go index 5f07c54ed8..c2ea288d87 100644 --- a/typedapi/types/suggest.go +++ b/typedapi/types/suggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ package types // PhraseSuggest // TermSuggest // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L34-L40 type Suggest interface{} diff --git a/typedapi/types/suggestcontext.go b/typedapi/types/suggestcontext.go index e22079eb5b..29f24825c7 100644 --- a/typedapi/types/suggestcontext.go +++ b/typedapi/types/suggestcontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SuggestContext type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L37-L42 type SuggestContext struct { Name string `json:"name"` Path *string `json:"path,omitempty"` @@ -55,18 +56,18 @@ func (s *SuggestContext) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } case "precision": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Precision", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -78,7 +79,7 @@ func (s *SuggestContext) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/suggester.go b/typedapi/types/suggester.go index 3406b477d6..14264071bd 100644 --- a/typedapi/types/suggester.go +++ b/typedapi/types/suggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // Suggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L101-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L101-L104 type Suggester struct { Suggesters map[string]FieldSuggester `json:"-"` // Text Global suggest text, to avoid repetition when the same text is used in @@ -57,7 +57,7 @@ func (s *Suggester) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +74,7 @@ func (s *Suggester) UnmarshalJSON(data []byte) error { } raw := NewFieldSuggester() if err := dec.Decode(&raw); err != nil { - return err + return fmt.Errorf("%s | %w", "Suggesters", err) } s.Suggesters[key] = *raw } diff --git a/typedapi/types/suggestfuzziness.go b/typedapi/types/suggestfuzziness.go index bf23fde3e7..74187c4349 100644 --- a/typedapi/types/suggestfuzziness.go +++ b/typedapi/types/suggestfuzziness.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SuggestFuzziness type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L193-L221 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L193-L221 type SuggestFuzziness struct { // Fuzziness The fuzziness factor. Fuzziness Fuzziness `json:"fuzziness,omitempty"` @@ -63,7 +64,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case "fuzziness": if err := dec.Decode(&s.Fuzziness); err != nil { - return err + return fmt.Errorf("%s | %w", "Fuzziness", err) } case "min_length": @@ -74,7 +75,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinLength", err) } s.MinLength = &value case float64: @@ -90,7 +91,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -105,7 +106,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Transpositions", err) } s.Transpositions = &value case bool: @@ -119,7 +120,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UnicodeAware", err) } s.UnicodeAware = &value case bool: diff --git a/typedapi/types/sumaggregate.go b/typedapi/types/sumaggregate.go index d155b589e4..398b5c8b2f 100644 --- a/typedapi/types/sumaggregate.go +++ b/typedapi/types/sumaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SumAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L203-L207 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L203-L207 type SumAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *SumAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/sumaggregation.go b/typedapi/types/sumaggregation.go index 6f8e1eaf4c..89e6be93f0 100644 --- a/typedapi/types/sumaggregation.go +++ b/typedapi/types/sumaggregation.go @@ -16,7 +16,7 @@ // under the License. 
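The recurring change in these generated decoders is the same one-line substitution: every "return err" inside UnmarshalJSON becomes return fmt.Errorf("%s | %w", "<FieldName>", err), so a decode failure names the Go struct field while keeping the original cause wrapped. As a rough standalone illustration only (the doc type and its Count field are invented for the example, not taken from the library), the sketch below applies the same labelling to a hand-written decoder:

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

// doc is an illustrative stand-in for one of the generated typed-API structs.
type doc struct {
	Count int `json:"count"`
}

// UnmarshalJSON mirrors the pattern this diff introduces: any decode error is
// wrapped with the Go field name via fmt.Errorf("%s | %w", ...).
func (d *doc) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if v, ok := raw["count"]; ok {
		// Accept either a JSON number or a string-encoded number, loosely
		// mirroring the generated decoders, and label any failure with the
		// struct field name.
		if err := json.Unmarshal(v, &d.Count); err != nil {
			var s string
			if err2 := json.Unmarshal(v, &s); err2 != nil {
				return fmt.Errorf("%s | %w", "Count", err)
			}
			n, convErr := strconv.Atoi(s)
			if convErr != nil {
				return fmt.Errorf("%s | %w", "Count", convErr)
			}
			d.Count = n
		}
	}
	return nil
}

func main() {
	var d doc
	err := json.Unmarshal([]byte(`{"count":"oops"}`), &d)
	fmt.Println(err) // Count | strconv.Atoi: parsing "oops": invalid syntax
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr)) // true: %w preserves the cause
}

Because the cause is wrapped with %w rather than stringified, errors.As and errors.Is still reach the underlying strconv or json error; only the message gains the field label.
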
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SumAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L292-L292 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L292-L292 type SumAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *SumAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *SumAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *SumAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *SumAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *SumAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/sumbucketaggregation.go b/typedapi/types/sumbucketaggregation.go index af16accbdb..3201f8e00c 100644 --- a/typedapi/types/sumbucketaggregation.go +++ b/typedapi/types/sumbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SumBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/pipeline.ts#L371-L371 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/pipeline.ts#L371-L371 type SumBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -63,13 +64,13 @@ func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { case "buckets_path": if err := dec.Decode(&s.BucketsPath); err != nil { - return err + return fmt.Errorf("%s | %w", "BucketsPath", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -80,18 +81,18 @@ func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "GapPolicy", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/summary.go b/typedapi/types/summary.go index 09e85f284a..712cfc4ccf 100644 --- a/typedapi/types/summary.go +++ b/typedapi/types/summary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // Summary type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/enrich/_types/Policy.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/enrich/_types/Policy.ts#L24-L26 type Summary struct { Config map[policytype.PolicyType]EnrichPolicy `json:"config"` } diff --git a/typedapi/types/synccontainer.go b/typedapi/types/synccontainer.go index f7cc4810a5..31cf3bfe90 100644 --- a/typedapi/types/synccontainer.go +++ b/typedapi/types/synccontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // SyncContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L169-L175 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L169-L175 type SyncContainer struct { // Time Specifies that the transform uses a time field to synchronize the source and // destination indices. diff --git a/typedapi/types/synonymgraphtokenfilter.go b/typedapi/types/synonymgraphtokenfilter.go index e6d8d41fdd..5a34dd2c0d 100644 --- a/typedapi/types/synonymgraphtokenfilter.go +++ b/typedapi/types/synonymgraphtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SynonymGraphTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L110-L119 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L110-L119 type SynonymGraphTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` @@ -67,7 +68,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Expand", err) } s.Expand = &value case bool: @@ -76,7 +77,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case "format": if err := dec.Decode(&s.Format); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } case "lenient": @@ -86,7 +87,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } s.Lenient = &value case bool: @@ -95,13 +96,13 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case "synonyms": if err := dec.Decode(&s.Synonyms); err != nil { - return err + return fmt.Errorf("%s | %w", "Synonyms", err) } case "synonyms_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SynonymsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +114,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case "tokenizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "updateable": @@ -134,7 +135,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Updateable", err) } s.Updateable = &value case bool: @@ -143,7 +144,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/synonymrule.go b/typedapi/types/synonymrule.go index 875823fccc..61c1e40414 100644 --- a/typedapi/types/synonymrule.go +++ b/typedapi/types/synonymrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SynonymRule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/_types/SynonymRule.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/_types/SynonymRule.ts#L26-L35 type SynonymRule struct { // Id Synonym Rule identifier Id *string `json:"id,omitempty"` @@ -55,12 +56,12 @@ func (s *SynonymRule) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "synonyms": if err := dec.Decode(&s.Synonyms); err != nil { - return err + return fmt.Errorf("%s | %w", "Synonyms", err) } } diff --git a/typedapi/types/synonymruleread.go b/typedapi/types/synonymruleread.go index aa47bc3ac3..9c0e12fb3d 100644 --- a/typedapi/types/synonymruleread.go +++ b/typedapi/types/synonymruleread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // SynonymRuleRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/_types/SynonymRule.ts#L38-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/_types/SynonymRule.ts#L38-L47 type SynonymRuleRead struct { // Id Synonym Rule identifier Id string `json:"id"` @@ -55,12 +56,12 @@ func (s *SynonymRuleRead) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "synonyms": if err := dec.Decode(&s.Synonyms); err != nil { - return err + return fmt.Errorf("%s | %w", "Synonyms", err) } } diff --git a/typedapi/types/synonymssetitem.go b/typedapi/types/synonymssetitem.go index 0f9a8bf9c9..02049c5225 100644 --- a/typedapi/types/synonymssetitem.go +++ b/typedapi/types/synonymssetitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // SynonymsSetItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L30-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L30-L39 type SynonymsSetItem struct { // Count Number of synonym rules that the synonym set contains Count int `json:"count"` @@ -61,7 +62,7 @@ func (s *SynonymsSetItem) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -71,7 +72,7 @@ func (s *SynonymsSetItem) UnmarshalJSON(data []byte) error { case "synonyms_set": if err := dec.Decode(&s.SynonymsSet); err != nil { - return err + return fmt.Errorf("%s | %w", "SynonymsSet", err) } } diff --git a/typedapi/types/synonymsupdateresult.go b/typedapi/types/synonymsupdateresult.go index ef6ba83e94..0fce46e5b8 100644 --- a/typedapi/types/synonymsupdateresult.go +++ b/typedapi/types/synonymsupdateresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // SynonymsUpdateResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/synonyms/_types/SynonymsUpdateResult.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/synonyms/_types/SynonymsUpdateResult.ts#L23-L34 type SynonymsUpdateResult struct { // ReloadAnalyzersDetails Updating synonyms in a synonym set reloads the associated analyzers. // This is the analyzers reloading result diff --git a/typedapi/types/synonymtokenfilter.go b/typedapi/types/synonymtokenfilter.go index 88b3bf730a..b4c3342fe1 100644 --- a/typedapi/types/synonymtokenfilter.go +++ b/typedapi/types/synonymtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // SynonymTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L121-L130 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L121-L130 type SynonymTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` @@ -67,7 +68,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Expand", err) } s.Expand = &value case bool: @@ -76,7 +77,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case "format": if err := dec.Decode(&s.Format); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } case "lenient": @@ -86,7 +87,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Lenient", err) } s.Lenient = &value case bool: @@ -95,13 +96,13 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case "synonyms": if err := dec.Decode(&s.Synonyms); err != nil { - return err + return fmt.Errorf("%s | %w", "Synonyms", err) } case "synonyms_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SynonymsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +114,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case "tokenizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenizer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,7 +125,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "updateable": @@ -134,7 +135,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Updateable", err) } s.Updateable = &value case bool: @@ -143,7 +144,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/targetmeanencodingpreprocessor.go b/typedapi/types/targetmeanencodingpreprocessor.go index 6df8a8205b..9a769a0167 100644 --- a/typedapi/types/targetmeanencodingpreprocessor.go +++ b/typedapi/types/targetmeanencodingpreprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TargetMeanEncodingPreprocessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L49-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L49-L54 type TargetMeanEncodingPreprocessor struct { DefaultValue Float64 `json:"default_value"` FeatureName string `json:"feature_name"` @@ -60,7 +61,7 @@ func (s *TargetMeanEncodingPreprocessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultValue", err) } f := Float64(value) s.DefaultValue = f @@ -72,7 +73,7 @@ func (s *TargetMeanEncodingPreprocessor) UnmarshalJSON(data []byte) error { case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *TargetMeanEncodingPreprocessor) UnmarshalJSON(data []byte) error { case "field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +99,7 @@ func (s *TargetMeanEncodingPreprocessor) UnmarshalJSON(data []byte) error { s.TargetMap = make(map[string]Float64, 0) } if err := dec.Decode(&s.TargetMap); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetMap", err) } } diff --git a/typedapi/types/taskfailure.go b/typedapi/types/taskfailure.go index 47a2037cb3..a9b7998342 100644 --- a/typedapi/types/taskfailure.go +++ b/typedapi/types/taskfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TaskFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Errors.ts#L66-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Errors.ts#L66-L71 type TaskFailure struct { NodeId string `json:"node_id"` Reason ErrorCause `json:"reason"` @@ -55,18 +56,18 @@ func (s *TaskFailure) UnmarshalJSON(data []byte) error { case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "reason": if err := dec.Decode(&s.Reason); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } case "status": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *TaskFailure) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TaskId", err) } s.TaskId = value case float64: diff --git a/typedapi/types/taskid.go b/typedapi/types/taskid.go index 2ec0363e9d..be7f95dad7 100644 --- a/typedapi/types/taskid.go +++ b/typedapi/types/taskid.go @@ -16,7 +16,7 @@ // under the License. 
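On the consuming side nothing in the generated structs changes; only the error text and the error chain do. Assuming the "%s | %w" format used throughout this diff (as in the TaskFailure hunk above, where a bad task_id is labelled "TaskId"), a caller can read the failing field from the message and still inspect the cause with the standard errors helpers. A minimal sketch, with the cause simulated rather than produced by the library:

package main

import (
	"errors"
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// Simulate an error shaped like the ones the new decoders return:
	// field name, the " | " separator from the format string, wrapped cause.
	cause := &strconv.NumError{Func: "ParseInt", Num: "abc", Err: strconv.ErrSyntax}
	err := fmt.Errorf("%s | %w", "TaskId", cause)

	// The field label lives only in the message; split on the separator.
	if field, _, ok := strings.Cut(err.Error(), " | "); ok {
		fmt.Println("failed field:", field) // failed field: TaskId
	}

	// The underlying cause remains reachable through the wrap chain.
	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr))            // true
	fmt.Println(errors.Is(err, strconv.ErrSyntax))  // true
}
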
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // int // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L132-L132 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L132-L132 type TaskId interface{} diff --git a/typedapi/types/taskinfo.go b/typedapi/types/taskinfo.go index c7fd6dd560..c01c380f85 100644 --- a/typedapi/types/taskinfo.go +++ b/typedapi/types/taskinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TaskInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/_types/TaskInfo.ts#L32-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/_types/TaskInfo.ts#L32-L47 type TaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -66,7 +67,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { case "action": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -82,7 +83,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Cancellable", err) } s.Cancellable = value case bool: @@ -96,7 +97,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Cancelled", err) } s.Cancelled = &value case bool: @@ -106,7 +107,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -120,7 +121,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { s.Headers = make(map[string]string, 0) } if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "id": @@ -130,7 +131,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } s.Id = value case float64: @@ -140,38 +141,38 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "parent_task_id": if err := dec.Decode(&s.ParentTaskId); err != nil { - return err + return fmt.Errorf("%s | %w", "ParentTaskId", err) } case "running_time": if err := dec.Decode(&s.RunningTime); err != nil { - return err + 
return fmt.Errorf("%s | %w", "RunningTime", err) } case "running_time_in_nanos": if err := dec.Decode(&s.RunningTimeInNanos); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningTimeInNanos", err) } case "start_time_in_millis": if err := dec.Decode(&s.StartTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTimeInMillis", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/taskinfos.go b/typedapi/types/taskinfos.go index 5a08c30690..0e8b74ad74 100644 --- a/typedapi/types/taskinfos.go +++ b/typedapi/types/taskinfos.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // []TaskInfo // map[string]ParentTaskInfo // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/tasks/_types/TaskListResponseBase.ts#L40-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/tasks/_types/TaskListResponseBase.ts#L40-L43 type TaskInfos interface{} diff --git a/typedapi/types/tasksrecord.go b/typedapi/types/tasksrecord.go index d9f8afb4f1..ca288fa9d6 100644 --- a/typedapi/types/tasksrecord.go +++ b/typedapi/types/tasksrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TasksRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/tasks/types.ts#L22-L101 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/tasks/types.ts#L22-L101 type TasksRecord struct { // Action The task action. 
Action *string `json:"action,omitempty"` @@ -84,7 +85,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "action", "ac": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Action", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +97,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "description", "desc": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -107,13 +108,13 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "ip", "i": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -125,7 +126,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "node", "n": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -136,13 +137,13 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "node_id", "ni": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "parent_task_id", "pti": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ParentTaskId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -154,7 +155,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "port", "po": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -166,7 +167,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "running_time", "time": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -178,7 +179,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "running_time_ns": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RunningTimeNs", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -190,7 +191,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "start_time", "start": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -201,13 +202,13 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "task_id", "ti": if err := dec.Decode(&s.TaskId); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskId", err) } case "timestamp", "ts", "hms", "hhmmss": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -219,7 +220,7 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case "type", "ty": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -230,13 +231,13 @@ func (s *TasksRecord) UnmarshalJSON(data []byte) error { case 
"version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "x_opaque_id", "x": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "XOpaqueId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/tdigest.go b/typedapi/types/tdigest.go index 68846d869e..af5680b07f 100644 --- a/typedapi/types/tdigest.go +++ b/typedapi/types/tdigest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TDigest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L223-L228 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L223-L228 type TDigest struct { // Compression Limits the maximum number of nodes used by the underlying TDigest algorithm // to `20 * compression`, enabling control of memory usage and approximation @@ -61,7 +62,7 @@ func (s *TDigest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Compression", err) } s.Compression = &value case float64: diff --git a/typedapi/types/tdigestpercentileranksaggregate.go b/typedapi/types/tdigestpercentileranksaggregate.go index 6304578896..80ed04dcbe 100644 --- a/typedapi/types/tdigestpercentileranksaggregate.go +++ b/typedapi/types/tdigestpercentileranksaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TDigestPercentileRanksAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L175-L176 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L175-L176 type TDigestPercentileRanksAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` @@ -52,7 +53,7 @@ func (s *TDigestPercentileRanksAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "values": @@ -65,13 +66,13 @@ func (s *TDigestPercentileRanksAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(KeyedPercentiles, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o case '[': o := []ArrayPercentilesItem{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o } diff --git a/typedapi/types/tdigestpercentilesaggregate.go b/typedapi/types/tdigestpercentilesaggregate.go index 27d664e6fa..a5e6e9bd74 100644 --- a/typedapi/types/tdigestpercentilesaggregate.go +++ b/typedapi/types/tdigestpercentilesaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TDigestPercentilesAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L172-L173 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L172-L173 type TDigestPercentilesAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` @@ -52,7 +53,7 @@ func (s *TDigestPercentilesAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "values": @@ -65,13 +66,13 @@ func (s *TDigestPercentilesAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(KeyedPercentiles, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o case '[': o := []ArrayPercentilesItem{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Values", err) } s.Values = o } diff --git a/typedapi/types/template.go b/typedapi/types/template.go index 1b86d21304..7e3f1305f2 100644 --- a/typedapi/types/template.go +++ b/typedapi/types/template.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // Template type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L33-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L33-L37 type Template struct { Aliases map[string]Alias `json:"aliases"` Mappings TypeMapping `json:"mappings"` diff --git a/typedapi/types/templateconfig.go b/typedapi/types/templateconfig.go index 7e0eda00f9..b2cece9873 100644 --- a/typedapi/types/templateconfig.go +++ b/typedapi/types/templateconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TemplateConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/msearch_template/types.ts#L28-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/msearch_template/types.ts#L28-L54 type TemplateConfig struct { // Explain If `true`, returns detailed information about score calculation as part of // each hit. @@ -72,7 +73,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -81,7 +82,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "params": @@ -89,7 +90,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { s.Params = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "profile": @@ -99,7 +100,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Profile", err) } s.Profile = &value case bool: @@ -109,7 +110,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { case "source": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/templatemapping.go b/typedapi/types/templatemapping.go index fc4b8d14c7..2de551c614 100644 --- a/typedapi/types/templatemapping.go +++ b/typedapi/types/templatemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TemplateMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/TemplateMapping.ts#L27-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/TemplateMapping.ts#L27-L34 type TemplateMapping struct { Aliases map[string]Alias `json:"aliases"` IndexPatterns []string `json:"index_patterns"` @@ -60,17 +61,17 @@ func (s *TemplateMapping) UnmarshalJSON(data []byte) error { s.Aliases = make(map[string]Alias, 0) } if err := dec.Decode(&s.Aliases); err != nil { - return err + return fmt.Errorf("%s | %w", "Aliases", err) } case "index_patterns": if err := dec.Decode(&s.IndexPatterns); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } case "mappings": if err := dec.Decode(&s.Mappings); err != nil { - return err + return fmt.Errorf("%s | %w", "Mappings", err) } case "order": @@ -81,7 +82,7 @@ func (s *TemplateMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = value case float64: @@ -94,12 +95,12 @@ func (s *TemplateMapping) UnmarshalJSON(data []byte) error { s.Settings = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/templatesrecord.go b/typedapi/types/templatesrecord.go index dceca7961f..1039da6673 100644 --- a/typedapi/types/templatesrecord.go +++ b/typedapi/types/templatesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TemplatesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/templates/types.ts#L22-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/templates/types.ts#L22-L48 type TemplatesRecord struct { // ComposedOf The component templates that comprise the index template. 
ComposedOf *string `json:"composed_of,omitempty"` @@ -62,7 +63,7 @@ func (s *TemplatesRecord) UnmarshalJSON(data []byte) error { case "composed_of", "c": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ComposedOf", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,7 +75,7 @@ func (s *TemplatesRecord) UnmarshalJSON(data []byte) error { case "index_patterns", "t": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPatterns", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -85,13 +86,13 @@ func (s *TemplatesRecord) UnmarshalJSON(data []byte) error { case "name", "n": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "order", "o", "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -102,7 +103,7 @@ func (s *TemplatesRecord) UnmarshalJSON(data []byte) error { case "version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/term.go b/typedapi/types/term.go index da41292aa8..280eba5ef2 100644 --- a/typedapi/types/term.go +++ b/typedapi/types/term.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Term type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/types.ts#L34-L40 type Term struct { DocFreq *int `json:"doc_freq,omitempty"` Score *Float64 `json:"score,omitempty"` @@ -62,7 +63,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocFreq", err) } s.DocFreq = &value case float64: @@ -77,7 +78,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } f := Float64(value) s.Score = &f @@ -94,7 +95,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TermFreq", err) } s.TermFreq = value case float64: @@ -104,7 +105,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case "tokens": if err := dec.Decode(&s.Tokens); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokens", err) } case "ttf": @@ -115,7 +116,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Ttf", err) } s.Ttf = &value case float64: diff --git a/typedapi/types/termquery.go b/typedapi/types/termquery.go index 4f04d74ac3..2b63aee927 100644 --- a/typedapi/types/termquery.go +++ b/typedapi/types/termquery.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L217-L231 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L217-L231 type TermQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -75,7 +76,7 @@ func (s *TermQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -91,7 +92,7 @@ func (s *TermQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseInsensitive", err) } s.CaseInsensitive = &value case bool: @@ -101,7 +102,7 @@ func (s *TermQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -112,7 +113,7 @@ func (s *TermQuery) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/termsaggregatebasedoubletermsbucket.go b/typedapi/types/termsaggregatebasedoubletermsbucket.go index 58b34a48c7..3cacc16139 100644 --- a/typedapi/types/termsaggregatebasedoubletermsbucket.go +++ b/typedapi/types/termsaggregatebasedoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsAggregateBaseDoubleTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseDoubleTermsBucket struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]DoubleTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []DoubleTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/termsaggregatebaselongtermsbucket.go b/typedapi/types/termsaggregatebaselongtermsbucket.go index b23995b856..111cf583fd 100644 --- a/typedapi/types/termsaggregatebaselongtermsbucket.go +++ b/typedapi/types/termsaggregatebaselongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsAggregateBaseLongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseLongTermsBucket struct { Buckets BucketsLongTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]LongTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []LongTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/termsaggregatebasemultitermsbucket.go b/typedapi/types/termsaggregatebasemultitermsbucket.go index 41e2ee02c5..e765602818 100644 --- a/typedapi/types/termsaggregatebasemultitermsbucket.go +++ b/typedapi/types/termsaggregatebasemultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsAggregateBaseMultiTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseMultiTermsBucket struct { Buckets BucketsMultiTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]MultiTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []MultiTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/termsaggregatebasestringtermsbucket.go b/typedapi/types/termsaggregatebasestringtermsbucket.go index 343b700b1b..4d9d95e331 100644 --- a/typedapi/types/termsaggregatebasestringtermsbucket.go +++ b/typedapi/types/termsaggregatebasestringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsAggregateBaseStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseStringTermsBucket struct { Buckets BucketsStringTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]StringTermsBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []StringTermsBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/termsaggregatebasevoid.go b/typedapi/types/termsaggregatebasevoid.go index fcf21b7d68..e7ccab00bb 100644 --- a/typedapi/types/termsaggregatebasevoid.go +++ b/typedapi/types/termsaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsAggregateBaseVoid type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseVoid struct { Buckets BucketsVoid `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]interface{}, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []interface{}{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/termsaggregation.go b/typedapi/types/termsaggregation.go index 6fe4627bf8..855931674a 100644 --- a/typedapi/types/termsaggregation.go +++ b/typedapi/types/termsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -35,7 +36,7 @@ import ( // TermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L910-L970 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L910-L970 type TermsAggregation struct { // CollectMode Determines how child aggregations should be calculated: breadth-first or // depth-first. 
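In each of the TermsAggregateBase* decoders above, Buckets is a union: Elasticsearch may return it either as a keyed object or as an array, so the generated code peeks at the opening delimiter before choosing a target type. A standalone sketch of that technique, with an illustrative bucket type standing in for the generated ones:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// bucket is a stand-in for DoubleTermsBucket, StringTermsBucket, etc.;
// the field names are illustrative only.
type bucket struct {
	Key      string `json:"key"`
	DocCount int64  `json:"doc_count"`
}

// decodeBuckets mirrors the generated pattern: look at the opening delimiter
// and decode either a keyed object or an array into one interface{} value.
func decodeBuckets(raw []byte) (interface{}, error) {
	dec := json.NewDecoder(bytes.NewReader(raw))
	tok, err := dec.Token()
	if err != nil {
		return nil, fmt.Errorf("%s | %w", "Buckets", err)
	}
	switch tok {
	case json.Delim('{'):
		o := make(map[string]bucket)
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	case json.Delim('['):
		var o []bucket
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	}
	return nil, fmt.Errorf("Buckets | unexpected opening token %v", tok)
}

func main() {
	v, _ := decodeBuckets([]byte(`[{"key":"a","doc_count":3}]`))
	fmt.Printf("%T\n", v) // []main.bucket
}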
@@ -95,7 +96,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "collect_mode": if err := dec.Decode(&s.CollectMode); err != nil { - return err + return fmt.Errorf("%s | %w", "CollectMode", err) } case "exclude": @@ -104,30 +105,30 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } s.Exclude = append(s.Exclude, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } } case "execution_hint": if err := dec.Decode(&s.ExecutionHint); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionHint", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -138,12 +139,12 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "min_doc_count": @@ -154,7 +155,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -164,7 +165,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "missing_bucket": @@ -174,7 +175,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingBucket", err) } s.MissingBucket = &value case bool: @@ -183,13 +184,13 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "missing_order": if err := dec.Decode(&s.MissingOrder); err != nil { - return err + return fmt.Errorf("%s | %w", "MissingOrder", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -208,13 +209,13 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o case '[': o := make([]map[string]sortorder.SortOrder, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } s.Order = o } @@ -222,7 +223,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -231,7 +232,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ 
-240,7 +241,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -248,7 +249,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -263,7 +264,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -278,7 +279,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShowTermDocCountError", err) } s.ShowTermDocCountError = &value case bool: @@ -293,7 +294,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -304,7 +305,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "value_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/termsexclude.go b/typedapi/types/termsexclude.go index a38e994759..d77ca4a5f3 100644 --- a/typedapi/types/termsexclude.go +++ b/typedapi/types/termsexclude.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TermsExclude type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1001-L1002 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1001-L1002 type TermsExclude []string diff --git a/typedapi/types/termsgrouping.go b/typedapi/types/termsgrouping.go index 9694633baf..e304accf1b 100644 --- a/typedapi/types/termsgrouping.go +++ b/typedapi/types/termsgrouping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TermsGrouping type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/rollup/_types/Groupings.ts#L75-L82 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/rollup/_types/Groupings.ts#L75-L82 type TermsGrouping struct { // Fields The set of fields that you wish to collect terms for. 
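The script case just above buffers the raw object and inspects it before deciding whether it holds an inline script or a stored-script reference. A simplified sketch of that variant selection; the stand-in types and the discriminating keys ("source", "id") are assumptions for illustration and replace the generated token-walking loop with a map probe:

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative stand-ins for the library's InlineScript / StoredScriptId variants.
type inlineScript struct {
	Source string `json:"source"`
}
type storedScriptID struct {
	ID string `json:"id"`
}

// decodeScript picks a variant by checking which keys are present in the raw
// object, then decodes the whole object into the matching type.
func decodeScript(raw json.RawMessage) (interface{}, error) {
	var probe map[string]json.RawMessage
	if err := json.Unmarshal(raw, &probe); err != nil {
		return nil, fmt.Errorf("%s | %w", "Script", err)
	}
	switch {
	case probe["source"] != nil:
		var o inlineScript
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Script", err)
		}
		return o, nil
	case probe["id"] != nil:
		var o storedScriptID
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Script", err)
		}
		return o, nil
	}
	return nil, fmt.Errorf("Script | unrecognised script shape")
}

func main() {
	v, _ := decodeScript(json.RawMessage(`{"id":"my-stored-script"}`))
	fmt.Printf("%T\n", v) // main.storedScriptID
}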
// This array can contain fields that are both keyword and numerics. @@ -58,13 +59,13 @@ func (s *TermsGrouping) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } s.Fields = append(s.Fields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { - return err + return fmt.Errorf("%s | %w", "Fields", err) } } diff --git a/typedapi/types/termsinclude.go b/typedapi/types/termsinclude.go index 55b2e3ffb4..5605a71461 100644 --- a/typedapi/types/termsinclude.go +++ b/typedapi/types/termsinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,5 +26,5 @@ package types // []string // TermsPartition // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L998-L999 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L998-L999 type TermsInclude interface{} diff --git a/typedapi/types/termslookup.go b/typedapi/types/termslookup.go index 6ca89f7c6d..41137af646 100644 --- a/typedapi/types/termslookup.go +++ b/typedapi/types/termslookup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TermsLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L242-L247 type TermsLookup struct { Id string `json:"id"` Index string `json:"index"` @@ -54,22 +55,22 @@ func (s *TermsLookup) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "path": if err := dec.Decode(&s.Path); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } } diff --git a/typedapi/types/termspartition.go b/typedapi/types/termspartition.go index a759104b69..e1702ba350 100644 --- a/typedapi/types/termspartition.go +++ b/typedapi/types/termspartition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsPartition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1004-L1013 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1004-L1013 type TermsPartition struct { // NumPartitions The number of partitions. NumPartitions int64 `json:"num_partitions"` @@ -60,7 +61,7 @@ func (s *TermsPartition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumPartitions", err) } s.NumPartitions = value case float64: @@ -75,7 +76,7 @@ func (s *TermsPartition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Partition", err) } s.Partition = value case float64: diff --git a/typedapi/types/termsquery.go b/typedapi/types/termsquery.go index 25dbee9664..d3797f96be 100644 --- a/typedapi/types/termsquery.go +++ b/typedapi/types/termsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -31,7 +31,7 @@ import ( // TermsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L233-L235 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L233-L235 type TermsQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -65,7 +65,7 @@ func (s *TermsQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -77,7 +77,7 @@ func (s *TermsQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,7 +91,7 @@ func (s *TermsQuery) UnmarshalJSON(data []byte) error { s.TermsQuery = make(map[string]TermsQueryField, 0) } if err := dec.Decode(&s.TermsQuery); err != nil { - return err + return fmt.Errorf("%s | %w", "TermsQuery", err) } default: diff --git a/typedapi/types/termsqueryfield.go b/typedapi/types/termsqueryfield.go index cc834639ce..d8092bc39d 100644 --- a/typedapi/types/termsqueryfield.go +++ b/typedapi/types/termsqueryfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
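The numeric fields above (num_partitions, partition, and many others in this diff) tolerate both a JSON number and a quoted string: the decoder switches on the dynamic type and falls back to strconv when the value arrives quoted. A small sketch of that leniency, with NumPartitions used as an illustrative field name:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// parseFlexibleInt64 mirrors the generated pattern: accept either a JSON
// number or a quoted string and convert to int64.
func parseFlexibleInt64(raw json.RawMessage) (int64, error) {
	var v interface{}
	if err := json.Unmarshal(raw, &v); err != nil {
		return 0, err
	}
	switch v := v.(type) {
	case string:
		value, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return 0, fmt.Errorf("%s | %w", "NumPartitions", err)
		}
		return value, nil
	case float64:
		return int64(v), nil
	}
	return 0, fmt.Errorf("NumPartitions | unsupported JSON type %T", v)
}

func main() {
	a, _ := parseFlexibleInt64(json.RawMessage(`10`))
	b, _ := parseFlexibleInt64(json.RawMessage(`"10"`))
	fmt.Println(a, b) // 10 10
}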
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // []FieldValue // TermsLookup // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L237-L240 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L237-L240 type TermsQueryField interface{} diff --git a/typedapi/types/termssetquery.go b/typedapi/types/termssetquery.go index 9c15c79523..15b33a64b1 100644 --- a/typedapi/types/termssetquery.go +++ b/typedapi/types/termssetquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermsSetQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L249-L262 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L249-L262 type TermsSetQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -71,7 +72,7 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -82,13 +83,13 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { case "minimum_should_match_field": if err := dec.Decode(&s.MinimumShouldMatchField); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatchField", err) } case "minimum_should_match_script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatchScript", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -97,7 +98,7 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatchScript", err) } switch t { @@ -106,7 +107,7 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatchScript", err) } s.MinimumShouldMatchScript = o @@ -114,7 +115,7 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "MinimumShouldMatchScript", err) } s.MinimumShouldMatchScript = o @@ -124,7 +125,7 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -135,7 +136,7 
@@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { case "terms": if err := dec.Decode(&s.Terms); err != nil { - return err + return fmt.Errorf("%s | %w", "Terms", err) } } diff --git a/typedapi/types/termsuggest.go b/typedapi/types/termsuggest.go index d8968ac978..9f4ab95238 100644 --- a/typedapi/types/termsuggest.go +++ b/typedapi/types/termsuggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermSuggest type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L64-L69 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L64-L69 type TermSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -61,7 +62,7 @@ func (s *TermSuggest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = value case float64: @@ -77,7 +78,7 @@ func (s *TermSuggest) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Offset", err) } s.Offset = value case float64: @@ -91,20 +92,20 @@ func (s *TermSuggest) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewTermSuggestOption() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } s.Options = append(s.Options, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/termsuggester.go b/typedapi/types/termsuggester.go index 985eb2643b..4e70d82a8d 100644 --- a/typedapi/types/termsuggester.go +++ b/typedapi/types/termsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // TermSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L503-L565 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L503-L565 type TermSuggester struct { // Analyzer The analyzer to analyze the suggest text with. // Defaults to the search analyzer of the suggest field. 
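Several fields in these hunks (Exclude, Fields, Options) accept either a single value or an array: the generated code checks whether the raw message starts with '[' and, if not, decodes a single element and appends it. A sketch of the same leniency for a string-valued field, with Fields as the illustrative name:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// decodeStringOrSlice accepts either "value" or ["value", ...] and always
// returns a slice, matching the single-or-array handling in the diff.
func decodeStringOrSlice(raw json.RawMessage) ([]string, error) {
	if !bytes.HasPrefix(raw, []byte("[")) {
		var single string
		if err := json.Unmarshal(raw, &single); err != nil {
			return nil, fmt.Errorf("%s | %w", "Fields", err)
		}
		return []string{single}, nil
	}
	var many []string
	if err := json.Unmarshal(raw, &many); err != nil {
		return nil, fmt.Errorf("%s | %w", "Fields", err)
	}
	return many, nil
}

func main() {
	a, _ := decodeStringOrSlice(json.RawMessage(`"user.keyword"`))
	b, _ := decodeStringOrSlice(json.RawMessage(`["a","b"]`))
	fmt.Println(a, b) // [user.keyword] [a b]
}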
@@ -105,7 +106,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -116,7 +117,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "lowercase_terms": @@ -126,7 +127,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LowercaseTerms", err) } s.LowercaseTerms = &value case bool: @@ -141,7 +142,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxEdits", err) } s.MaxEdits = &value case float64: @@ -157,7 +158,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxInspections", err) } s.MaxInspections = &value case float64: @@ -172,7 +173,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTermFreq", err) } f := float32(value) s.MaxTermFreq = &f @@ -188,7 +189,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocFreq", err) } f := float32(value) s.MinDocFreq = &f @@ -205,7 +206,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinWordLength", err) } s.MinWordLength = &value case float64: @@ -221,7 +222,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixLength", err) } s.PrefixLength = &value case float64: @@ -237,7 +238,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: @@ -253,7 +254,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -263,23 +264,23 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "sort": if err := dec.Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } case "string_distance": if err := dec.Decode(&s.StringDistance); err != nil { - return err + return fmt.Errorf("%s | %w", "StringDistance", err) } case "suggest_mode": if err := dec.Decode(&s.SuggestMode); err != nil { - return err + return fmt.Errorf("%s | %w", "SuggestMode", err) } case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/termsuggestoption.go b/typedapi/types/termsuggestoption.go index 419267b4a2..8a96c2f570 100644 --- a/typedapi/types/termsuggestoption.go +++ b/typedapi/types/termsuggestoption.go 
@@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermSuggestOption type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/suggester.ts#L93-L99 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/suggester.ts#L93-L99 type TermSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Freq int64 `json:"freq"` @@ -61,7 +62,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CollateMatch", err) } s.CollateMatch = &value case bool: @@ -75,7 +76,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Freq", err) } s.Freq = value case float64: @@ -86,7 +87,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { case "highlighted": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlighted", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -102,7 +103,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Score", err) } f := Float64(value) s.Score = f @@ -114,7 +115,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { case "text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Text", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/termvector.go b/typedapi/types/termvector.go index c8006d3576..e34e9507b7 100644 --- a/typedapi/types/termvector.go +++ b/typedapi/types/termvector.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TermVector type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/types.ts#L23-L26 type TermVector struct { FieldStatistics FieldStatistics `json:"field_statistics"` Terms map[string]Term `json:"terms"` diff --git a/typedapi/types/termvectorsfilter.go b/typedapi/types/termvectorsfilter.go index 9710a1c028..476024055e 100644 --- a/typedapi/types/termvectorsfilter.go +++ b/typedapi/types/termvectorsfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermVectorsFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/types.ts#L49-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/types.ts#L49-L86 type TermVectorsFilter struct { // MaxDocFreq Ignore words which occur in more than this many docs. // Defaults to unbounded. @@ -74,7 +75,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxDocFreq", err) } s.MaxDocFreq = &value case float64: @@ -90,7 +91,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxNumTerms", err) } s.MaxNumTerms = &value case float64: @@ -106,7 +107,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTermFreq", err) } s.MaxTermFreq = &value case float64: @@ -122,7 +123,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxWordLength", err) } s.MaxWordLength = &value case float64: @@ -138,7 +139,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocFreq", err) } s.MinDocFreq = &value case float64: @@ -154,7 +155,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinTermFreq", err) } s.MinTermFreq = &value case float64: @@ -170,7 +171,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinWordLength", err) } s.MinWordLength = &value case float64: diff --git a/typedapi/types/termvectorsresult.go b/typedapi/types/termvectorsresult.go index 3e60d80136..6e25e42f3e 100644 --- a/typedapi/types/termvectorsresult.go +++ b/typedapi/types/termvectorsresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermVectorsResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/mtermvectors/types.ts#L96-L104 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/mtermvectors/types.ts#L96-L104 type TermVectorsResult struct { Error *ErrorCause `json:"error,omitempty"` Found *bool `json:"found,omitempty"` @@ -58,7 +59,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { case "error": if err := dec.Decode(&s.Error); err != nil { - return err + return fmt.Errorf("%s | %w", "Error", err) } case "found": @@ -68,7 +69,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Found", err) } s.Found = &value case bool: @@ -77,12 +78,12 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "term_vectors": @@ -90,7 +91,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { s.TermVectors = make(map[string]TermVector, 0) } if err := dec.Decode(&s.TermVectors); err != nil { - return err + return fmt.Errorf("%s | %w", "TermVectors", err) } case "took": @@ -100,7 +101,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Took", err) } s.Took = &value case float64: @@ -110,7 +111,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { case "_version": if err := dec.Decode(&s.Version_); err != nil { - return err + return fmt.Errorf("%s | %w", "Version_", err) } } diff --git a/typedapi/types/termvectorstoken.go b/typedapi/types/termvectorstoken.go index 394d973973..8360fde468 100644 --- a/typedapi/types/termvectorstoken.go +++ b/typedapi/types/termvectorstoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TermVectorsToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/termvectors/types.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/termvectors/types.ts#L42-L47 type TermVectorsToken struct { EndOffset *int `json:"end_offset,omitempty"` Payload *string `json:"payload,omitempty"` @@ -61,7 +62,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EndOffset", err) } s.EndOffset = &value case float64: @@ -72,7 +73,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case "payload": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Payload", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -89,7 +90,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Position", err) } s.Position = value case float64: @@ -105,7 +106,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StartOffset", err) } s.StartOffset = &value case float64: diff --git a/typedapi/types/testpopulation.go b/typedapi/types/testpopulation.go index a061a6002f..6d0d9b11fe 100644 --- a/typedapi/types/testpopulation.go +++ b/typedapi/types/testpopulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TestPopulation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L310-L320 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L310-L320 type TestPopulation struct { // Field The field to aggregate. 
Field string `json:"field"` @@ -55,18 +56,18 @@ func (s *TestPopulation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "filter": if err := dec.Decode(&s.Filter); err != nil { - return err + return fmt.Errorf("%s | %w", "Filter", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -75,7 +76,7 @@ func (s *TestPopulation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -84,7 +85,7 @@ func (s *TestPopulation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -92,7 +93,7 @@ func (s *TestPopulation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/textclassificationinferenceoptions.go b/typedapi/types/textclassificationinferenceoptions.go index a20b500f97..4ad88f5c29 100644 --- a/typedapi/types/textclassificationinferenceoptions.go +++ b/typedapi/types/textclassificationinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L189-L199 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L189-L199 type TextClassificationInferenceOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. 
Must have the // same deminsions as the default configured labels @@ -61,7 +62,7 @@ func (s *TextClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "classification_labels": if err := dec.Decode(&s.ClassificationLabels); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassificationLabels", err) } case "num_top_classes": @@ -72,7 +73,7 @@ func (s *TextClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -83,7 +84,7 @@ func (s *TextClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,7 +95,7 @@ func (s *TextClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/textclassificationinferenceupdateoptions.go b/typedapi/types/textclassificationinferenceupdateoptions.go index f13170bfd1..054f806162 100644 --- a/typedapi/types/textclassificationinferenceupdateoptions.go +++ b/typedapi/types/textclassificationinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextClassificationInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L363-L372 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L363-L372 type TextClassificationInferenceUpdateOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. 
Must have the // same deminsions as the default configured labels @@ -61,7 +62,7 @@ func (s *TextClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) er case "classification_labels": if err := dec.Decode(&s.ClassificationLabels); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassificationLabels", err) } case "num_top_classes": @@ -72,7 +73,7 @@ func (s *TextClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) er case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumTopClasses", err) } s.NumTopClasses = &value case float64: @@ -83,7 +84,7 @@ func (s *TextClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) er case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,7 +95,7 @@ func (s *TextClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) er case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/textembedding.go b/typedapi/types/textembedding.go index a2db99dc3d..84f87e6fd7 100644 --- a/typedapi/types/textembedding.go +++ b/typedapi/types/textembedding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextEmbedding type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Knn.ts#L56-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Knn.ts#L56-L59 type TextEmbedding struct { ModelId string `json:"model_id"` ModelText string `json:"model_text"` @@ -54,7 +55,7 @@ func (s *TextEmbedding) UnmarshalJSON(data []byte) error { case "model_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -66,7 +67,7 @@ func (s *TextEmbedding) UnmarshalJSON(data []byte) error { case "model_text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelText", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/textembeddingbyteresult.go b/typedapi/types/textembeddingbyteresult.go index d464a71fc9..f7b81bfebe 100644 --- a/typedapi/types/textembeddingbyteresult.go +++ b/typedapi/types/textembeddingbyteresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TextEmbeddingByteResult type. 
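Plain string fields such as model_id and model_text above are captured as a raw JSON token and then unquoted with strconv.Unquote, falling back to the raw text when unquoting fails. A compact sketch of that handling:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeLooseString mirrors the raw-token-plus-Unquote pattern: strip the JSON
// quotes when present, otherwise keep the raw text (e.g. a bare number).
func decodeLooseString(raw json.RawMessage) string {
	o := string(raw)
	if unquoted, err := strconv.Unquote(o); err == nil {
		return unquoted
	}
	return o
}

func main() {
	fmt.Println(decodeLooseString(json.RawMessage(`".elser_model_2"`))) // .elser_model_2
	fmt.Println(decodeLooseString(json.RawMessage(`42`)))               // 42
}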
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Results.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Results.ts#L45-L50 type TextEmbeddingByteResult struct { Embedding []byte `json:"embedding"` } @@ -51,7 +52,7 @@ func (s *TextEmbeddingByteResult) UnmarshalJSON(data []byte) error { case "embedding": if err := dec.Decode(&s.Embedding); err != nil { - return err + return fmt.Errorf("%s | %w", "Embedding", err) } } diff --git a/typedapi/types/textembeddinginferenceoptions.go b/typedapi/types/textembeddinginferenceoptions.go index 845646a895..09eb57b879 100644 --- a/typedapi/types/textembeddinginferenceoptions.go +++ b/typedapi/types/textembeddinginferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextEmbeddingInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L237-L245 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L237-L245 type TextEmbeddingInferenceOptions struct { // EmbeddingSize The number of dimensions in the embedding output EmbeddingSize *int `json:"embedding_size,omitempty"` @@ -64,7 +65,7 @@ func (s *TextEmbeddingInferenceOptions) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EmbeddingSize", err) } s.EmbeddingSize = &value case float64: @@ -75,7 +76,7 @@ func (s *TextEmbeddingInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *TextEmbeddingInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/textembeddinginferenceupdateoptions.go b/typedapi/types/textembeddinginferenceupdateoptions.go index 0724cf2b9d..4c6376668f 100644 --- a/typedapi/types/textembeddinginferenceupdateoptions.go +++ b/typedapi/types/textembeddinginferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextEmbeddingInferenceUpdateOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L392-L396 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L392-L396 type TextEmbeddingInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. @@ -56,7 +57,7 @@ func (s *TextEmbeddingInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *TextEmbeddingInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/textembeddingresult.go b/typedapi/types/textembeddingresult.go index ffa94b81c5..b1358b294e 100644 --- a/typedapi/types/textembeddingresult.go +++ b/typedapi/types/textembeddingresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TextEmbeddingResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/inference/_types/Results.ts#L52-L57 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/inference/_types/Results.ts#L52-L57 type TextEmbeddingResult struct { Embedding []float32 `json:"embedding"` } @@ -51,7 +52,7 @@ func (s *TextEmbeddingResult) UnmarshalJSON(data []byte) error { case "embedding": if err := dec.Decode(&s.Embedding); err != nil { - return err + return fmt.Errorf("%s | %w", "Embedding", err) } } diff --git a/typedapi/types/textexpansioninferenceoptions.go b/typedapi/types/textexpansioninferenceoptions.go index 43ec6af451..51cf4827c0 100644 --- a/typedapi/types/textexpansioninferenceoptions.go +++ b/typedapi/types/textexpansioninferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextExpansionInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L247-L253 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L247-L253 type TextExpansionInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. 
@@ -57,7 +58,7 @@ func (s *TextExpansionInferenceOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -68,7 +69,7 @@ func (s *TextExpansionInferenceOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/textexpansioninferenceupdateoptions.go b/typedapi/types/textexpansioninferenceupdateoptions.go index 5a9b4fafa9..f05a1a9dec 100644 --- a/typedapi/types/textexpansioninferenceupdateoptions.go +++ b/typedapi/types/textexpansioninferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextExpansionInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L398-L402 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L398-L402 type TextExpansionInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. @@ -56,7 +57,7 @@ func (s *TextExpansionInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -67,7 +68,7 @@ func (s *TextExpansionInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/textexpansionquery.go b/typedapi/types/textexpansionquery.go index 9e5c58e3c2..e957cb3bde 100644 --- a/typedapi/types/textexpansionquery.go +++ b/typedapi/types/textexpansionquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextExpansionQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/TextExpansionQuery.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/TextExpansionQuery.ts#L23-L33 type TextExpansionQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -69,7 +70,7 @@ func (s *TextExpansionQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -81,7 +82,7 @@ func (s *TextExpansionQuery) UnmarshalJSON(data []byte) error { case "model_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -93,7 +94,7 @@ func (s *TextExpansionQuery) UnmarshalJSON(data []byte) error { case "model_text": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelText", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,13 +105,13 @@ func (s *TextExpansionQuery) UnmarshalJSON(data []byte) error { case "pruning_config": if err := dec.Decode(&s.PruningConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "PruningConfig", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/textindexprefixes.go b/typedapi/types/textindexprefixes.go index 9311eeec99..2539ecd8de 100644 --- a/typedapi/types/textindexprefixes.go +++ b/typedapi/types/textindexprefixes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TextIndexPrefixes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L249-L252 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L250-L253 type TextIndexPrefixes struct { MaxChars int `json:"max_chars"` MinChars int `json:"min_chars"` @@ -59,7 +60,7 @@ func (s *TextIndexPrefixes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxChars", err) } s.MaxChars = value case float64: @@ -75,7 +76,7 @@ func (s *TextIndexPrefixes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinChars", err) } s.MinChars = value case float64: diff --git a/typedapi/types/textproperty.go b/typedapi/types/textproperty.go index dd52a32975..8124f2788b 100644 --- a/typedapi/types/textproperty.go +++ b/typedapi/types/textproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // TextProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L254-L270 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L255-L271 type TextProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -80,7 +81,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,7 +97,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -111,19 +112,19 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "eager_global_ordinals": @@ -133,7 +134,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EagerGlobalOrdinals", err) } s.EagerGlobalOrdinals = &value case bool: @@ -147,7 +148,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Fielddata", err) } s.Fielddata = &value case bool: @@ -156,7 +157,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "fielddata_frequency_filter": if err := dec.Decode(&s.FielddataFrequencyFilter); err != nil { - return err + return fmt.Errorf("%s | %w", "FielddataFrequencyFilter", err) } case "fields": @@ -474,7 +475,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -489,7 +490,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,7 +499,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "index_options": if err := dec.Decode(&s.IndexOptions); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexOptions", err) } case "index_phrases": @@ -508,7 +509,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPhrases", err) } s.IndexPhrases = &value case bool: @@ -517,7 +518,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "index_prefixes": if err := dec.Decode(&s.IndexPrefixes); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexPrefixes", err) } case "meta": @@ -525,7 +526,7 @@ func (s *TextProperty) 
UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "norms": @@ -535,7 +536,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Norms", err) } s.Norms = &value case bool: @@ -550,7 +551,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PositionIncrementGap", err) } s.PositionIncrementGap = &value case float64: @@ -868,7 +869,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "search_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -880,7 +881,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "search_quote_analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchQuoteAnalyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -892,7 +893,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -908,7 +909,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -917,12 +918,12 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "term_vector": if err := dec.Decode(&s.TermVector); err != nil { - return err + return fmt.Errorf("%s | %w", "TermVector", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/texttoanalyze.go b/typedapi/types/texttoanalyze.go index b5eafa9352..bf87adf9cc 100644 --- a/typedapi/types/texttoanalyze.go +++ b/typedapi/types/texttoanalyze.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TextToAnalyze type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L66-L66 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L66-L66 type TextToAnalyze []string diff --git a/typedapi/types/threadcount.go b/typedapi/types/threadcount.go index d12b3398dc..8245647bb0 100644 --- a/typedapi/types/threadcount.go +++ b/typedapi/types/threadcount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ThreadCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L1004-L1029 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L1004-L1029 type ThreadCount struct { // Active Number of active threads in the thread pool. Active *int64 `json:"active,omitempty"` @@ -68,7 +69,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Active", err) } s.Active = &value case float64: @@ -83,7 +84,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Completed", err) } s.Completed = &value case float64: @@ -98,7 +99,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Largest", err) } s.Largest = &value case float64: @@ -113,7 +114,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Queue", err) } s.Queue = &value case float64: @@ -128,7 +129,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Rejected", err) } s.Rejected = &value case float64: @@ -143,7 +144,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Threads", err) } s.Threads = &value case float64: diff --git a/typedapi/types/threadpoolrecord.go b/typedapi/types/threadpoolrecord.go index 5793bf96b2..8e681a4cc9 100644 --- a/typedapi/types/threadpoolrecord.go +++ b/typedapi/types/threadpoolrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ThreadPoolRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/thread_pool/types.ts#L22-L124 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/thread_pool/types.ts#L22-L124 type ThreadPoolRecord struct { // Active The number of active threads in the current thread pool. 
Active *string `json:"active,omitempty"` @@ -94,7 +95,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "active", "a": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Active", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -106,7 +107,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "completed", "c": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Completed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "core", "cr": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Core", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "ephemeral_node_id", "eid": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "EphemeralNodeId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -142,7 +143,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "host", "h": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -154,7 +155,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "ip", "i": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -166,7 +167,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "keep_alive", "ka": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeepAlive", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -178,7 +179,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "largest", "l": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Largest", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -190,7 +191,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "max", "mx": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -202,7 +203,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "name", "n": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -213,13 +214,13 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "node_id", "id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "node_name", "nn": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -231,7 +232,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "pid", "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pid", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -243,7 +244,7 @@ func (s *ThreadPoolRecord) 
UnmarshalJSON(data []byte) error { case "pool_size", "psz": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PoolSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -255,7 +256,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "port", "po": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -267,7 +268,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "queue", "q": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Queue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -279,7 +280,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "queue_size", "qs": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueueSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -291,7 +292,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "rejected", "r": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Rejected", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -303,7 +304,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "size", "sz": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -315,7 +316,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { case "type", "t": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/throttlestate.go b/typedapi/types/throttlestate.go index d874d656f7..efd6791d3f 100644 --- a/typedapi/types/throttlestate.go +++ b/typedapi/types/throttlestate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ThrottleState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L126-L129 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L126-L129 type ThrottleState struct { Reason string `json:"reason"` Timestamp DateTime `json:"timestamp"` @@ -54,7 +55,7 @@ func (s *ThrottleState) UnmarshalJSON(data []byte) error { case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *ThrottleState) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/timeofmonth.go b/typedapi/types/timeofmonth.go index 9d196c8204..e16bc3355d 100644 --- a/typedapi/types/timeofmonth.go +++ b/typedapi/types/timeofmonth.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TimeOfMonth type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L110-L113 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L110-L113 type TimeOfMonth struct { At []string `json:"at"` On []int `json:"on"` diff --git a/typedapi/types/timeofweek.go b/typedapi/types/timeofweek.go index ec44f306f7..f3d623c4ce 100644 --- a/typedapi/types/timeofweek.go +++ b/typedapi/types/timeofweek.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // TimeOfWeek type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L115-L118 type TimeOfWeek struct { At []string `json:"at"` On []day.Day `json:"on"` diff --git a/typedapi/types/timeofyear.go b/typedapi/types/timeofyear.go index f78a1d8df2..2bac1dc6cc 100644 --- a/typedapi/types/timeofyear.go +++ b/typedapi/types/timeofyear.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // TimeOfYear type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Schedule.ts#L120-L124 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Schedule.ts#L120-L124 type TimeOfYear struct { At []string `json:"at"` Int []month.Month `json:"int"` diff --git a/typedapi/types/timesync.go b/typedapi/types/timesync.go index 0e264128ac..7a437b4297 100644 --- a/typedapi/types/timesync.go +++ b/typedapi/types/timesync.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TimeSync type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L177-L189 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L177-L189 type TimeSync struct { // Delay The time delay between the current time and the latest input data time. Delay Duration `json:"delay,omitempty"` @@ -58,12 +59,12 @@ func (s *TimeSync) UnmarshalJSON(data []byte) error { case "delay": if err := dec.Decode(&s.Delay); err != nil { - return err + return fmt.Errorf("%s | %w", "Delay", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } } diff --git a/typedapi/types/timingstats.go b/typedapi/types/timingstats.go index 13b9ec372b..08b1c4637b 100644 --- a/typedapi/types/timingstats.go +++ b/typedapi/types/timingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L563-L568 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L563-L568 type TimingStats struct { // ElapsedTime Runtime of the analysis in milliseconds. ElapsedTime int64 `json:"elapsed_time"` @@ -54,12 +55,12 @@ func (s *TimingStats) UnmarshalJSON(data []byte) error { case "elapsed_time": if err := dec.Decode(&s.ElapsedTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ElapsedTime", err) } case "iteration_time": if err := dec.Decode(&s.IterationTime); err != nil { - return err + return fmt.Errorf("%s | %w", "IterationTime", err) } } diff --git a/typedapi/types/tokencountproperty.go b/typedapi/types/tokencountproperty.go index d376c2cc3a..233239fe9c 100644 --- a/typedapi/types/tokencountproperty.go +++ b/typedapi/types/tokencountproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TokenCountProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/specialized.ts#L79-L86 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/specialized.ts#L79-L86 type TokenCountProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -70,7 +71,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case "analyzer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Analyzer", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -101,13 +102,13 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -118,7 +119,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -127,7 +128,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "enable_position_increments": @@ -137,7 +138,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EnablePositionIncrements", err) } s.EnablePositionIncrements = &value case bool: @@ -459,7 +460,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -474,7 +475,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -486,7 +487,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": @@ -496,7 +497,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } f := Float64(value) s.NullValue = &f @@ -815,7 +816,7 @@ func 
(s *TokenCountProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -831,7 +832,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -840,7 +841,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/tokendetail.go b/typedapi/types/tokendetail.go index 3c56a29065..5e25adac75 100644 --- a/typedapi/types/tokendetail.go +++ b/typedapi/types/tokendetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TokenDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/analyze/types.ts#L68-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/analyze/types.ts#L68-L71 type TokenDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` @@ -54,7 +55,7 @@ func (s *TokenDetail) UnmarshalJSON(data []byte) error { case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -65,7 +66,7 @@ func (s *TokenDetail) UnmarshalJSON(data []byte) error { case "tokens": if err := dec.Decode(&s.Tokens); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokens", err) } } diff --git a/typedapi/types/tokenfilter.go b/typedapi/types/tokenfilter.go index 2dfe434537..74b6e42241 100644 --- a/typedapi/types/tokenfilter.go +++ b/typedapi/types/tokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // TokenFilterDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L344-L346 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L344-L346 type TokenFilter interface{} diff --git a/typedapi/types/tokenfilterdefinition.go b/typedapi/types/tokenfilterdefinition.go index eb29b72dc2..877b9f5462 100644 --- a/typedapi/types/tokenfilterdefinition.go +++ b/typedapi/types/tokenfilterdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -70,5 +70,5 @@ package types // PhoneticTokenFilter // DictionaryDecompounderTokenFilter // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L348-L400 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L348-L400 type TokenFilterDefinition interface{} diff --git a/typedapi/types/tokenizationconfigcontainer.go b/typedapi/types/tokenizationconfigcontainer.go index a1df673dab..76e718d2ac 100644 --- a/typedapi/types/tokenizationconfigcontainer.go +++ b/typedapi/types/tokenizationconfigcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TokenizationConfigContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L110-L129 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L110-L129 type TokenizationConfigContainer struct { // Bert Indicates BERT tokenization and its options Bert *NlpBertTokenizationConfig `json:"bert,omitempty"` diff --git a/typedapi/types/tokenizer.go b/typedapi/types/tokenizer.go index 377aa2801b..d78b5f5f55 100644 --- a/typedapi/types/tokenizer.go +++ b/typedapi/types/tokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // string // TokenizerDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L120-L122 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L120-L122 type Tokenizer interface{} diff --git a/typedapi/types/tokenizerdefinition.go b/typedapi/types/tokenizerdefinition.go index 2a518152b6..bf926f0d71 100644 --- a/typedapi/types/tokenizerdefinition.go +++ b/typedapi/types/tokenizerdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -37,5 +37,5 @@ package types // PatternTokenizer // IcuTokenizer // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L124-L142 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L124-L142 type TokenizerDefinition interface{} diff --git a/typedapi/types/tokenpruningconfig.go b/typedapi/types/tokenpruningconfig.go index 4537f5b641..dea6a365a7 100644 --- a/typedapi/types/tokenpruningconfig.go +++ b/typedapi/types/tokenpruningconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TokenPruningConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/TokenPruningConfig.ts#L22-L35 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/TokenPruningConfig.ts#L22-L35 type TokenPruningConfig struct { // OnlyScorePrunedTokens Whether to only score pruned tokens, vs only scoring kept tokens. OnlyScorePrunedTokens *bool `json:"only_score_pruned_tokens,omitempty"` @@ -65,7 +66,7 @@ func (s *TokenPruningConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OnlyScorePrunedTokens", err) } s.OnlyScorePrunedTokens = &value case bool: @@ -80,7 +81,7 @@ func (s *TokenPruningConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TokensFreqRatioThreshold", err) } s.TokensFreqRatioThreshold = &value case float64: @@ -95,7 +96,7 @@ func (s *TokenPruningConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "TokensWeightThreshold", err) } f := float32(value) s.TokensWeightThreshold = &f diff --git a/typedapi/types/topclassentry.go b/typedapi/types/topclassentry.go index 91f3e0c888..1ec67bd9c0 100644 --- a/typedapi/types/topclassentry.go +++ b/typedapi/types/topclassentry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TopClassEntry type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L440-L444 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L440-L444 type TopClassEntry struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -55,7 +56,7 @@ func (s *TopClassEntry) UnmarshalJSON(data []byte) error { case "class_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -71,7 +72,7 @@ func (s *TopClassEntry) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ClassProbability", err) } f := Float64(value) s.ClassProbability = f @@ -87,7 +88,7 @@ func (s *TopClassEntry) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ClassScore", err) } f := Float64(value) s.ClassScore = f diff --git a/typedapi/types/tophit.go b/typedapi/types/tophit.go index 1fe6c91f6a..4c3d961bb5 100644 --- a/typedapi/types/tophit.go +++ b/typedapi/types/tophit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TopHit type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/text_structure/find_structure/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/text_structure/find_structure/types.ts#L35-L38 type TopHit struct { Count int64 `json:"count"` Value json.RawMessage `json:"value,omitempty"` @@ -58,7 +59,7 @@ func (s *TopHit) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: @@ -68,7 +69,7 @@ func (s *TopHit) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } } diff --git a/typedapi/types/tophitsaggregate.go b/typedapi/types/tophitsaggregate.go index 2e6716116a..6e01dc5141 100644 --- a/typedapi/types/tophitsaggregate.go +++ b/typedapi/types/tophitsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TopHitsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L654-L657 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L654-L657 type TopHitsAggregate struct { Hits HitsMetadata `json:"hits"` Meta Metadata `json:"meta,omitempty"` @@ -52,12 +53,12 @@ func (s *TopHitsAggregate) UnmarshalJSON(data []byte) error { case "hits": if err := dec.Decode(&s.Hits); err != nil { - return err + return fmt.Errorf("%s | %w", "Hits", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/tophitsaggregation.go b/typedapi/types/tophitsaggregation.go index f03148e99e..f6a956cde2 100644 --- a/typedapi/types/tophitsaggregation.go +++ b/typedapi/types/tophitsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TopHitsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L337-L392 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L337-L392 type TopHitsAggregation struct { // DocvalueFields Fields for which to return doc values. 
DocvalueFields []string `json:"docvalue_fields,omitempty"` @@ -91,13 +92,13 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } s.DocvalueFields = append(s.DocvalueFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.DocvalueFields); err != nil { - return err + return fmt.Errorf("%s | %w", "DocvalueFields", err) } } @@ -108,7 +109,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Explain", err) } s.Explain = &value case bool: @@ -117,7 +118,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "from": @@ -128,7 +129,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -138,18 +139,18 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case "highlight": if err := dec.Decode(&s.Highlight); err != nil { - return err + return fmt.Errorf("%s | %w", "Highlight", err) } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -158,7 +159,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -167,7 +168,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -175,7 +176,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -187,7 +188,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { s.ScriptFields = make(map[string]ScriptField, 0) } if err := dec.Decode(&s.ScriptFields); err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptFields", err) } case "seq_no_primary_term": @@ -197,7 +198,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNoPrimaryTerm", err) } s.SeqNoPrimaryTerm = &value case bool: @@ -212,7 +213,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -226,19 +227,19 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != 
nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "stored_fields": @@ -247,13 +248,13 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } s.StoredFields = append(s.StoredFields, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { - return err + return fmt.Errorf("%s | %w", "StoredFields", err) } } @@ -264,7 +265,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TrackScores", err) } s.TrackScores = &value case bool: @@ -278,7 +279,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } s.Version = &value case bool: diff --git a/typedapi/types/topleftbottomrightgeobounds.go b/typedapi/types/topleftbottomrightgeobounds.go index b482ab2bb4..0affdee099 100644 --- a/typedapi/types/topleftbottomrightgeobounds.go +++ b/typedapi/types/topleftbottomrightgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TopLeftBottomRightGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L161-L164 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L161-L164 type TopLeftBottomRightGeoBounds struct { BottomRight GeoLocation `json:"bottom_right"` TopLeft GeoLocation `json:"top_left"` @@ -52,12 +53,12 @@ func (s *TopLeftBottomRightGeoBounds) UnmarshalJSON(data []byte) error { case "bottom_right": if err := dec.Decode(&s.BottomRight); err != nil { - return err + return fmt.Errorf("%s | %w", "BottomRight", err) } case "top_left": if err := dec.Decode(&s.TopLeft); err != nil { - return err + return fmt.Errorf("%s | %w", "TopLeft", err) } } diff --git a/typedapi/types/topmetrics.go b/typedapi/types/topmetrics.go index 87acd2be97..8e26830dd2 100644 --- a/typedapi/types/topmetrics.go +++ b/typedapi/types/topmetrics.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TopMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L729-L733 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L729-L733 type TopMetrics struct { Metrics map[string]FieldValue `json:"metrics"` Sort []FieldValue `json:"sort"` diff --git a/typedapi/types/topmetricsaggregate.go b/typedapi/types/topmetricsaggregate.go index eed0dcaee5..69891d12f5 100644 --- a/typedapi/types/topmetricsaggregate.go +++ b/typedapi/types/topmetricsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TopMetricsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L724-L727 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L724-L727 type TopMetricsAggregate struct { Meta Metadata `json:"meta,omitempty"` Top []TopMetrics `json:"top"` @@ -52,12 +53,12 @@ func (s *TopMetricsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "top": if err := dec.Decode(&s.Top); err != nil { - return err + return fmt.Errorf("%s | %w", "Top", err) } } diff --git a/typedapi/types/topmetricsaggregation.go b/typedapi/types/topmetricsaggregation.go index 559cdf7bdb..28fc25c01c 100644 --- a/typedapi/types/topmetricsaggregation.go +++ b/typedapi/types/topmetricsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TopMetricsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L394-L408 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L394-L408 type TopMetricsAggregation struct { // Field The field on which to run the aggregation. 
Field *string `json:"field,omitempty"` @@ -63,7 +64,7 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "metrics": @@ -72,25 +73,25 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := NewTopMetricsValue() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } s.Metrics = append(s.Metrics, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Metrics); err != nil { - return err + return fmt.Errorf("%s | %w", "Metrics", err) } } case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -99,7 +100,7 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -108,7 +109,7 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -116,7 +117,7 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -131,7 +132,7 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -145,13 +146,13 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } diff --git a/typedapi/types/topmetricsvalue.go b/typedapi/types/topmetricsvalue.go index 403e8b19de..6deed31aef 100644 --- a/typedapi/types/topmetricsvalue.go +++ b/typedapi/types/topmetricsvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TopMetricsValue type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L410-L415 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L410-L415 type TopMetricsValue struct { // Field A field to return as a metric. Field string `json:"field"` @@ -52,7 +53,7 @@ func (s *TopMetricsValue) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } } diff --git a/typedapi/types/toprightbottomleftgeobounds.go b/typedapi/types/toprightbottomleftgeobounds.go index d6b904ddd1..7b98d5fb77 100644 --- a/typedapi/types/toprightbottomleftgeobounds.go +++ b/typedapi/types/toprightbottomleftgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TopRightBottomLeftGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L166-L169 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L166-L169 type TopRightBottomLeftGeoBounds struct { BottomLeft GeoLocation `json:"bottom_left"` TopRight GeoLocation `json:"top_right"` @@ -52,12 +53,12 @@ func (s *TopRightBottomLeftGeoBounds) UnmarshalJSON(data []byte) error { case "bottom_left": if err := dec.Decode(&s.BottomLeft); err != nil { - return err + return fmt.Errorf("%s | %w", "BottomLeft", err) } case "top_right": if err := dec.Decode(&s.TopRight); err != nil { - return err + return fmt.Errorf("%s | %w", "TopRight", err) } } diff --git a/typedapi/types/totalfeatureimportance.go b/typedapi/types/totalfeatureimportance.go index a92ebd9c65..5de06e2365 100644 --- a/typedapi/types/totalfeatureimportance.go +++ b/typedapi/types/totalfeatureimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TotalFeatureImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L233-L240 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L233-L240 type TotalFeatureImportance struct { // Classes If the trained model is a classification model, feature importance statistics // are gathered per target class value. 
@@ -58,17 +59,17 @@ func (s *TotalFeatureImportance) UnmarshalJSON(data []byte) error { case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } case "feature_name": if err := dec.Decode(&s.FeatureName); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } case "importance": if err := dec.Decode(&s.Importance); err != nil { - return err + return fmt.Errorf("%s | %w", "Importance", err) } } diff --git a/typedapi/types/totalfeatureimportanceclass.go b/typedapi/types/totalfeatureimportanceclass.go index 5daec87eda..9997728585 100644 --- a/typedapi/types/totalfeatureimportanceclass.go +++ b/typedapi/types/totalfeatureimportanceclass.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TotalFeatureImportanceClass type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L242-L247 type TotalFeatureImportanceClass struct { // ClassName The target class value. Could be a string, boolean, or number. ClassName string `json:"class_name"` @@ -55,12 +56,12 @@ func (s *TotalFeatureImportanceClass) UnmarshalJSON(data []byte) error { case "class_name": if err := dec.Decode(&s.ClassName); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } case "importance": if err := dec.Decode(&s.Importance); err != nil { - return err + return fmt.Errorf("%s | %w", "Importance", err) } } diff --git a/typedapi/types/totalfeatureimportancestatistics.go b/typedapi/types/totalfeatureimportancestatistics.go index cfdc657ddd..a226d5491c 100644 --- a/typedapi/types/totalfeatureimportancestatistics.go +++ b/typedapi/types/totalfeatureimportancestatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TotalFeatureImportanceStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L249-L256 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L249-L256 type TotalFeatureImportanceStatistics struct { // Max The maximum importance value across all the training data for this feature. 
Max int `json:"max"` @@ -65,7 +66,7 @@ func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } s.Max = value case float64: @@ -80,7 +81,7 @@ func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MeanMagnitude", err) } f := Float64(value) s.MeanMagnitude = f @@ -97,7 +98,7 @@ func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } s.Min = value case float64: diff --git a/typedapi/types/totaluserprofiles.go b/typedapi/types/totaluserprofiles.go index ca56a0d320..919c65a952 100644 --- a/typedapi/types/totaluserprofiles.go +++ b/typedapi/types/totaluserprofiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TotalUserProfiles type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/suggest_user_profiles/Response.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/suggest_user_profiles/Response.ts#L24-L27 type TotalUserProfiles struct { Relation string `json:"relation"` Value int64 `json:"value"` @@ -53,7 +54,7 @@ func (s *TotalUserProfiles) UnmarshalJSON(data []byte) error { case "relation": if err := dec.Decode(&s.Relation); err != nil { - return err + return fmt.Errorf("%s | %w", "Relation", err) } case "value": @@ -63,7 +64,7 @@ func (s *TotalUserProfiles) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } s.Value = value case float64: diff --git a/typedapi/types/trackhits.go b/typedapi/types/trackhits.go index 30591deaa6..0daec3fb84 100644 --- a/typedapi/types/trackhits.go +++ b/typedapi/types/trackhits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // bool // int // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/search/_types/hits.ts#L142-L150 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/search/_types/hits.ts#L142-L150 type TrackHits interface{} diff --git a/typedapi/types/trainedmodel.go b/typedapi/types/trainedmodel.go index ea600c1bed..6844ce5c38 100644 --- a/typedapi/types/trainedmodel.go +++ b/typedapi/types/trainedmodel.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TrainedModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L60-L72 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L60-L72 type TrainedModel struct { // Ensemble The definition for an ensemble model Ensemble *Ensemble `json:"ensemble,omitempty"` diff --git a/typedapi/types/trainedmodelassignment.go b/typedapi/types/trainedmodelassignment.go index e02f77892e..517b884a78 100644 --- a/typedapi/types/trainedmodelassignment.go +++ b/typedapi/types/trainedmodelassignment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TrainedModelAssignment type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L403-L418 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L403-L418 type TrainedModelAssignment struct { // AssignmentState The overall assignment state. AssignmentState deploymentassignmentstate.DeploymentAssignmentState `json:"assignment_state"` @@ -61,7 +62,7 @@ func (s *TrainedModelAssignment) UnmarshalJSON(data []byte) error { case "assignment_state": if err := dec.Decode(&s.AssignmentState); err != nil { - return err + return fmt.Errorf("%s | %w", "AssignmentState", err) } case "max_assigned_allocations": @@ -72,7 +73,7 @@ func (s *TrainedModelAssignment) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAssignedAllocations", err) } s.MaxAssignedAllocations = &value case float64: @@ -85,17 +86,17 @@ func (s *TrainedModelAssignment) UnmarshalJSON(data []byte) error { s.RoutingTable = make(map[string]TrainedModelAssignmentRoutingTable, 0) } if err := dec.Decode(&s.RoutingTable); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingTable", err) } case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } case "task_parameters": if err := dec.Decode(&s.TaskParameters); err != nil { - return err + return fmt.Errorf("%s | %w", "TaskParameters", err) } } diff --git a/typedapi/types/trainedmodelassignmentroutingtable.go b/typedapi/types/trainedmodelassignmentroutingtable.go index abf66f4448..f07e16b6e6 100644 --- a/typedapi/types/trainedmodelassignmentroutingtable.go +++ b/typedapi/types/trainedmodelassignmentroutingtable.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TrainedModelAssignmentRoutingTable type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L374-L392 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L374-L392 type TrainedModelAssignmentRoutingTable struct { // CurrentAllocations Current number of allocations. CurrentAllocations int `json:"current_allocations"` @@ -68,7 +69,7 @@ func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentAllocations", err) } s.CurrentAllocations = value case float64: @@ -79,7 +80,7 @@ func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,7 +91,7 @@ func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { case "routing_state": if err := dec.Decode(&s.RoutingState); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingState", err) } case "target_allocations": @@ -101,7 +102,7 @@ func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TargetAllocations", err) } s.TargetAllocations = value case float64: diff --git a/typedapi/types/trainedmodelassignmenttaskparameters.go b/typedapi/types/trainedmodelassignmenttaskparameters.go index ce74323a66..ef3c710bb0 100644 --- a/typedapi/types/trainedmodelassignmenttaskparameters.go +++ b/typedapi/types/trainedmodelassignmenttaskparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TrainedModelAssignmentTaskParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L316-L349 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L316-L349 type TrainedModelAssignmentTaskParameters struct { // CacheSize The size of the trained model cache. 
CacheSize ByteSize `json:"cache_size"` @@ -68,12 +69,12 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "cache_size": if err := dec.Decode(&s.CacheSize); err != nil { - return err + return fmt.Errorf("%s | %w", "CacheSize", err) } case "deployment_id": if err := dec.Decode(&s.DeploymentId); err != nil { - return err + return fmt.Errorf("%s | %w", "DeploymentId", err) } case "model_bytes": @@ -84,7 +85,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ModelBytes", err) } s.ModelBytes = value case float64: @@ -94,7 +95,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "model_id": if err := dec.Decode(&s.ModelId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } case "number_of_allocations": @@ -105,7 +106,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfAllocations", err) } s.NumberOfAllocations = value case float64: @@ -115,7 +116,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "priority": if err := dec.Decode(&s.Priority); err != nil { - return err + return fmt.Errorf("%s | %w", "Priority", err) } case "queue_capacity": @@ -126,7 +127,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueCapacity", err) } s.QueueCapacity = value case float64: @@ -142,7 +143,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ThreadsPerAllocation", err) } s.ThreadsPerAllocation = value case float64: diff --git a/typedapi/types/trainedmodelconfig.go b/typedapi/types/trainedmodelconfig.go index a31aa7a62d..7f2768e5b8 100644 --- a/typedapi/types/trainedmodelconfig.go +++ b/typedapi/types/trainedmodelconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TrainedModelConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L165-L200 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L165-L200 type TrainedModelConfig struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` // CreateTime The time when the trained model was created. 
@@ -93,7 +94,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case "compressed_definition": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CompressedDefinition", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -104,13 +105,13 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case "create_time": if err := dec.Decode(&s.CreateTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } case "created_by": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreatedBy", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -124,13 +125,13 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { s.DefaultFieldMap = make(map[string]string, 0) } if err := dec.Decode(&s.DefaultFieldMap); err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultFieldMap", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -147,7 +148,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EstimatedHeapMemoryUsageBytes", err) } s.EstimatedHeapMemoryUsageBytes = &value case float64: @@ -163,7 +164,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EstimatedOperations", err) } s.EstimatedOperations = &value case float64: @@ -178,7 +179,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FullyDefined", err) } s.FullyDefined = &value case bool: @@ -187,18 +188,18 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case "inference_config": if err := dec.Decode(&s.InferenceConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceConfig", err) } case "input": if err := dec.Decode(&s.Input); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } case "license_level": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LicenseLevel", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -209,42 +210,42 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case "location": if err := dec.Decode(&s.Location); err != nil { - return err + return fmt.Errorf("%s | %w", "Location", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "model_id": if err := dec.Decode(&s.ModelId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } case "model_size_bytes": if err := dec.Decode(&s.ModelSizeBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSizeBytes", err) } case "model_type": if err := dec.Decode(&s.ModelType); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelType", err) } case "prefix_strings": if err := dec.Decode(&s.PrefixStrings); err != nil { - return err + return fmt.Errorf("%s | %w", "PrefixStrings", err) } case "tags": if err := dec.Decode(&s.Tags); err != nil { - return err + return fmt.Errorf("%s | %w", "Tags", err) } case "version": if 
err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/trainedmodelconfiginput.go b/typedapi/types/trainedmodelconfiginput.go index 6355b5a85f..88337b429f 100644 --- a/typedapi/types/trainedmodelconfiginput.go +++ b/typedapi/types/trainedmodelconfiginput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TrainedModelConfigInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L202-L205 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L202-L205 type TrainedModelConfigInput struct { // FieldNames An array of input field names for the model. FieldNames []string `json:"field_names"` diff --git a/typedapi/types/trainedmodelconfigmetadata.go b/typedapi/types/trainedmodelconfigmetadata.go index 50533ca5ca..b5139880a5 100644 --- a/typedapi/types/trainedmodelconfigmetadata.go +++ b/typedapi/types/trainedmodelconfigmetadata.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TrainedModelConfigMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L207-L215 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L207-L215 type TrainedModelConfigMetadata struct { // FeatureImportanceBaseline An object that contains the baseline for feature importance values. For // regression analysis, it is a single value. For classification analysis, there diff --git a/typedapi/types/trainedmodeldeploymentallocationstatus.go b/typedapi/types/trainedmodeldeploymentallocationstatus.go index 9ca0335d7c..476fa0bdad 100644 --- a/typedapi/types/trainedmodeldeploymentallocationstatus.go +++ b/typedapi/types/trainedmodeldeploymentallocationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TrainedModelDeploymentAllocationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L394-L401 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L394-L401 type TrainedModelDeploymentAllocationStatus struct { // AllocationCount The current number of nodes where the model is allocated. 
AllocationCount int `json:"allocation_count"` @@ -65,7 +66,7 @@ func (s *TrainedModelDeploymentAllocationStatus) UnmarshalJSON(data []byte) erro case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllocationCount", err) } s.AllocationCount = value case float64: @@ -75,7 +76,7 @@ func (s *TrainedModelDeploymentAllocationStatus) UnmarshalJSON(data []byte) erro case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "target_allocation_count": @@ -86,7 +87,7 @@ func (s *TrainedModelDeploymentAllocationStatus) UnmarshalJSON(data []byte) erro case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TargetAllocationCount", err) } s.TargetAllocationCount = value case float64: diff --git a/typedapi/types/trainedmodeldeploymentnodesstats.go b/typedapi/types/trainedmodeldeploymentnodesstats.go index 2b63a78aee..1652b84f1b 100644 --- a/typedapi/types/trainedmodeldeploymentnodesstats.go +++ b/typedapi/types/trainedmodeldeploymentnodesstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelDeploymentNodesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L133-L163 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L133-L163 type TrainedModelDeploymentNodesStats struct { // AverageInferenceTimeMs The average time for each inference call to complete on this node. 
AverageInferenceTimeMs Float64 `json:"average_inference_time_ms"` @@ -77,7 +78,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "average_inference_time_ms": if err := dec.Decode(&s.AverageInferenceTimeMs); err != nil { - return err + return fmt.Errorf("%s | %w", "AverageInferenceTimeMs", err) } case "error_count": @@ -88,7 +89,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ErrorCount", err) } s.ErrorCount = value case float64: @@ -104,7 +105,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceCount", err) } s.InferenceCount = value case float64: @@ -119,7 +120,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LastAccess", err) } s.LastAccess = value case float64: @@ -129,7 +130,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "number_of_allocations": @@ -140,7 +141,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfAllocations", err) } s.NumberOfAllocations = value case float64: @@ -156,7 +157,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfPendingRequests", err) } s.NumberOfPendingRequests = value case float64: @@ -172,7 +173,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RejectionExecutionCount", err) } s.RejectionExecutionCount = value case float64: @@ -182,12 +183,12 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "routing_state": if err := dec.Decode(&s.RoutingState); err != nil { - return err + return fmt.Errorf("%s | %w", "RoutingState", err) } case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - return err + return fmt.Errorf("%s | %w", "StartTime", err) } case "threads_per_allocation": @@ -198,7 +199,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ThreadsPerAllocation", err) } s.ThreadsPerAllocation = value case float64: @@ -214,7 +215,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeoutCount", err) } s.TimeoutCount = value case float64: diff --git a/typedapi/types/trainedmodeldeploymentstats.go b/typedapi/types/trainedmodeldeploymentstats.go index 424e8e552d..7ddc5a812c 100644 --- a/typedapi/types/trainedmodeldeploymentstats.go +++ b/typedapi/types/trainedmodeldeploymentstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TrainedModelDeploymentStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L62-L102 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L62-L102 type TrainedModelDeploymentStats struct { // AllocationStatus The detailed allocation status for the deployment. AllocationStatus TrainedModelDeploymentAllocationStatus `json:"allocation_status"` @@ -88,17 +89,17 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "allocation_status": if err := dec.Decode(&s.AllocationStatus); err != nil { - return err + return fmt.Errorf("%s | %w", "AllocationStatus", err) } case "cache_size": if err := dec.Decode(&s.CacheSize); err != nil { - return err + return fmt.Errorf("%s | %w", "CacheSize", err) } case "deployment_id": if err := dec.Decode(&s.DeploymentId); err != nil { - return err + return fmt.Errorf("%s | %w", "DeploymentId", err) } case "error_count": @@ -109,7 +110,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ErrorCount", err) } s.ErrorCount = value case float64: @@ -125,7 +126,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceCount", err) } s.InferenceCount = value case float64: @@ -135,12 +136,12 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "model_id": if err := dec.Decode(&s.ModelId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } case "nodes": if err := dec.Decode(&s.Nodes); err != nil { - return err + return fmt.Errorf("%s | %w", "Nodes", err) } case "number_of_allocations": @@ -151,7 +152,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumberOfAllocations", err) } s.NumberOfAllocations = value case float64: @@ -167,7 +168,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "QueueCapacity", err) } s.QueueCapacity = value case float64: @@ -178,7 +179,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -195,7 +196,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RejectedExecutionCount", err) } s.RejectedExecutionCount = value case float64: @@ -205,12 +206,12 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "start_time": if err := dec.Decode(&s.StartTime); err != nil { - 
return err + return fmt.Errorf("%s | %w", "StartTime", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "threads_per_allocation": @@ -221,7 +222,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ThreadsPerAllocation", err) } s.ThreadsPerAllocation = value case float64: @@ -237,7 +238,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeoutCount", err) } s.TimeoutCount = value case float64: diff --git a/typedapi/types/trainedmodelentities.go b/typedapi/types/trainedmodelentities.go index 8e9d851837..fa99eb4b3b 100644 --- a/typedapi/types/trainedmodelentities.go +++ b/typedapi/types/trainedmodelentities.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelEntities type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L433-L439 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L433-L439 type TrainedModelEntities struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -57,7 +58,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case "class_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -73,7 +74,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ClassProbability", err) } f := Float64(value) s.ClassProbability = f @@ -90,7 +91,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "EndPos", err) } s.EndPos = value case float64: @@ -101,7 +102,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case "entity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Entity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StartPos", err) } s.StartPos = value case float64: diff --git a/typedapi/types/trainedmodelinferenceclassimportance.go b/typedapi/types/trainedmodelinferenceclassimportance.go index 04813f1abe..6e40185ba0 100644 --- a/typedapi/types/trainedmodelinferenceclassimportance.go +++ b/typedapi/types/trainedmodelinferenceclassimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelInferenceClassImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L446-L449 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L446-L449 type TrainedModelInferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` @@ -54,7 +55,7 @@ func (s *TrainedModelInferenceClassImportance) UnmarshalJSON(data []byte) error case "class_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -70,7 +71,7 @@ func (s *TrainedModelInferenceClassImportance) UnmarshalJSON(data []byte) error case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Importance", err) } f := Float64(value) s.Importance = f diff --git a/typedapi/types/trainedmodelinferencefeatureimportance.go b/typedapi/types/trainedmodelinferencefeatureimportance.go index ed106f78ef..4f9303ff8f 100644 --- a/typedapi/types/trainedmodelinferencefeatureimportance.go +++ b/typedapi/types/trainedmodelinferencefeatureimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelInferenceFeatureImportance type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L451-L455 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L451-L455 type TrainedModelInferenceFeatureImportance struct { Classes []TrainedModelInferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` @@ -54,13 +55,13 @@ func (s *TrainedModelInferenceFeatureImportance) UnmarshalJSON(data []byte) erro case "classes": if err := dec.Decode(&s.Classes); err != nil { - return err + return fmt.Errorf("%s | %w", "Classes", err) } case "feature_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureName", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -76,7 +77,7 @@ func (s *TrainedModelInferenceFeatureImportance) UnmarshalJSON(data []byte) erro case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Importance", err) } f := Float64(value) s.Importance = &f diff --git a/typedapi/types/trainedmodelinferencestats.go b/typedapi/types/trainedmodelinferencestats.go index 9ca6c096e3..c64e73d1ea 100644 --- a/typedapi/types/trainedmodelinferencestats.go +++ b/typedapi/types/trainedmodelinferencestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelInferenceStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L104-L124 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L104-L124 type TrainedModelInferenceStats struct { // CacheMissCount The number of times the model was loaded for inference and was not retrieved // from the cache. 
@@ -74,7 +75,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CacheMissCount", err) } s.CacheMissCount = value case float64: @@ -90,7 +91,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailureCount", err) } s.FailureCount = value case float64: @@ -106,7 +107,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceCount", err) } s.InferenceCount = value case float64: @@ -122,7 +123,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MissingAllFieldsCount", err) } s.MissingAllFieldsCount = value case float64: @@ -132,7 +133,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case "timestamp": if err := dec.Decode(&s.Timestamp); err != nil { - return err + return fmt.Errorf("%s | %w", "Timestamp", err) } } diff --git a/typedapi/types/trainedmodellocation.go b/typedapi/types/trainedmodellocation.go index 697b490932..7340b75896 100644 --- a/typedapi/types/trainedmodellocation.go +++ b/typedapi/types/trainedmodellocation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TrainedModelLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L420-L422 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L420-L422 type TrainedModelLocation struct { Index TrainedModelLocationIndex `json:"index"` } diff --git a/typedapi/types/trainedmodellocationindex.go b/typedapi/types/trainedmodellocationindex.go index 814058e465..aa285d35b7 100644 --- a/typedapi/types/trainedmodellocationindex.go +++ b/typedapi/types/trainedmodellocationindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TrainedModelLocationIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L424-L426 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L424-L426 type TrainedModelLocationIndex struct { Name string `json:"name"` } @@ -51,7 +52,7 @@ func (s *TrainedModelLocationIndex) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } } diff --git a/typedapi/types/trainedmodelprefixstrings.go b/typedapi/types/trainedmodelprefixstrings.go index cd0f4d9103..99216e2575 100644 --- a/typedapi/types/trainedmodelprefixstrings.go +++ b/typedapi/types/trainedmodelprefixstrings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,18 +24,19 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelPrefixStrings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L428-L437 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L428-L437 type TrainedModelPrefixStrings struct { // Ingest String prepended to input at ingest - Ingest string `json:"ingest"` + Ingest *string `json:"ingest,omitempty"` // Search String prepended to input at search - Search string `json:"search"` + Search *string `json:"search,omitempty"` } func (s *TrainedModelPrefixStrings) UnmarshalJSON(data []byte) error { @@ -56,26 +57,26 @@ func (s *TrainedModelPrefixStrings) UnmarshalJSON(data []byte) error { case "ingest": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Ingest", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) if err != nil { o = string(tmp[:]) } - s.Ingest = o + s.Ingest = &o case "search": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Search", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) if err != nil { o = string(tmp[:]) } - s.Search = o + s.Search = &o } } diff --git a/typedapi/types/trainedmodelsizestats.go b/typedapi/types/trainedmodelsizestats.go index 094e22f060..3e93ad9925 100644 --- a/typedapi/types/trainedmodelsizestats.go +++ b/typedapi/types/trainedmodelsizestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelSizeStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L126-L131 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L126-L131 type TrainedModelSizeStats struct { // ModelSizeBytes The size of the model in bytes. ModelSizeBytes ByteSize `json:"model_size_bytes"` @@ -55,7 +56,7 @@ func (s *TrainedModelSizeStats) UnmarshalJSON(data []byte) error { case "model_size_bytes": if err := dec.Decode(&s.ModelSizeBytes); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSizeBytes", err) } case "required_native_memory_bytes": @@ -66,7 +67,7 @@ func (s *TrainedModelSizeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequiredNativeMemoryBytes", err) } s.RequiredNativeMemoryBytes = value case float64: diff --git a/typedapi/types/trainedmodelsrecord.go b/typedapi/types/trainedmodelsrecord.go index 430816dbaa..0fcc6c337e 100644 --- a/typedapi/types/trainedmodelsrecord.go +++ b/typedapi/types/trainedmodelsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/ml_trained_models/types.ts#L23-L115 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/ml_trained_models/types.ts#L23-L115 type TrainedModelsRecord struct { // CreateTime The time the model was created. 
CreateTime DateTime `json:"create_time,omitempty"` @@ -88,13 +89,13 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "create_time", "ct": if err := dec.Decode(&s.CreateTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } case "created_by", "c", "createdBy": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreatedBy", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -106,7 +107,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "data_frame.analysis", "dfa", "dataFrameAnalyticsAnalysis": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameAnalysis", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -118,7 +119,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "data_frame.create_time", "dft", "dataFrameAnalyticsTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameCreateTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -130,7 +131,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "data_frame.id", "dfid", "dataFrameAnalytics": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -142,7 +143,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "data_frame.source_index", "dfsi", "dataFrameAnalyticsSrcIndex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DataFrameSourceIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -154,7 +155,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "description", "d": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -165,18 +166,18 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "heap_size", "hs", "modelHeapSize": if err := dec.Decode(&s.HeapSize); err != nil { - return err + return fmt.Errorf("%s | %w", "HeapSize", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "ingest.count", "ic", "ingestCount": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IngestCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -188,7 +189,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "ingest.current", "icurr", "ingestCurrent": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IngestCurrent", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -200,7 +201,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "ingest.failed", "if", "ingestFailed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IngestFailed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -212,7 +213,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "ingest.pipelines", "ip", "ingestPipelines": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", 
"IngestPipelines", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -224,7 +225,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "ingest.time", "it", "ingestTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IngestTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -236,7 +237,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "license", "l": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "License", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -248,7 +249,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "operations", "o", "modelOperations": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Operations", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -260,7 +261,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -271,7 +272,7 @@ func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { case "version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/trainedmodelstats.go b/typedapi/types/trainedmodelstats.go index aea517d787..2b1dcc860a 100644 --- a/typedapi/types/trainedmodelstats.go +++ b/typedapi/types/trainedmodelstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/TrainedModel.ts#L42-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/TrainedModel.ts#L42-L60 type TrainedModelStats struct { // DeploymentStats A collection of deployment stats, which is present when the models are // deployed. 
@@ -66,12 +67,12 @@ func (s *TrainedModelStats) UnmarshalJSON(data []byte) error { case "deployment_stats": if err := dec.Decode(&s.DeploymentStats); err != nil { - return err + return fmt.Errorf("%s | %w", "DeploymentStats", err) } case "inference_stats": if err := dec.Decode(&s.InferenceStats); err != nil { - return err + return fmt.Errorf("%s | %w", "InferenceStats", err) } case "ingest": @@ -79,17 +80,17 @@ func (s *TrainedModelStats) UnmarshalJSON(data []byte) error { s.Ingest = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Ingest); err != nil { - return err + return fmt.Errorf("%s | %w", "Ingest", err) } case "model_id": if err := dec.Decode(&s.ModelId); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelId", err) } case "model_size_stats": if err := dec.Decode(&s.ModelSizeStats); err != nil { - return err + return fmt.Errorf("%s | %w", "ModelSizeStats", err) } case "pipeline_count": @@ -100,7 +101,7 @@ func (s *TrainedModelStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "PipelineCount", err) } s.PipelineCount = value case float64: diff --git a/typedapi/types/trainedmodeltree.go b/typedapi/types/trainedmodeltree.go index 46f7e61931..43bf2ee7b2 100644 --- a/typedapi/types/trainedmodeltree.go +++ b/typedapi/types/trainedmodeltree.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelTree type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L74-L79 type TrainedModelTree struct { ClassificationLabels []string `json:"classification_labels,omitempty"` FeatureNames []string `json:"feature_names"` @@ -55,18 +56,18 @@ func (s *TrainedModelTree) UnmarshalJSON(data []byte) error { case "classification_labels": if err := dec.Decode(&s.ClassificationLabels); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassificationLabels", err) } case "feature_names": if err := dec.Decode(&s.FeatureNames); err != nil { - return err + return fmt.Errorf("%s | %w", "FeatureNames", err) } case "target_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *TrainedModelTree) UnmarshalJSON(data []byte) error { case "tree_structure": if err := dec.Decode(&s.TreeStructure); err != nil { - return err + return fmt.Errorf("%s | %w", "TreeStructure", err) } } diff --git a/typedapi/types/trainedmodeltreenode.go b/typedapi/types/trainedmodeltreenode.go index 6d1e73e645..65dcee89ea 100644 --- a/typedapi/types/trainedmodeltreenode.go +++ b/typedapi/types/trainedmodeltreenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrainedModelTreeNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L81-L91 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L81-L91 type TrainedModelTreeNode struct { DecisionType *string `json:"decision_type,omitempty"` DefaultLeft *bool `json:"default_left,omitempty"` @@ -61,7 +62,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case "decision_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DecisionType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DefaultLeft", err) } s.DefaultLeft = &value case bool: @@ -91,7 +92,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LeafValue", err) } f := Float64(value) s.LeafValue = &f @@ -108,7 +109,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "LeftChild", err) } s.LeftChild = &value case float64: @@ -124,7 +125,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NodeIndex", err) } s.NodeIndex = value case float64: @@ -140,7 +141,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RightChild", err) } s.RightChild = &value case float64: @@ -156,7 +157,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitFeature", err) } s.SplitFeature = &value case float64: @@ -172,7 +173,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitGain", err) } s.SplitGain = &value case float64: @@ -187,7 +188,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Threshold", err) } f := Float64(value) s.Threshold = &f diff --git a/typedapi/types/transformauthorization.go b/typedapi/types/transformauthorization.go index c123fe5e71..2add56ee43 100644 --- a/typedapi/types/transformauthorization.go +++ b/typedapi/types/transformauthorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/Authorization.ts#L59-L71 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/Authorization.ts#L59-L71 type TransformAuthorization struct { // ApiKey If an API key was used for the most recent update to the transform, its name // and identifier are listed in the response. @@ -60,18 +61,18 @@ func (s *TransformAuthorization) UnmarshalJSON(data []byte) error { case "api_key": if err := dec.Decode(&s.ApiKey); err != nil { - return err + return fmt.Errorf("%s | %w", "ApiKey", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "service_account": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ServiceAccount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/transformcontainer.go b/typedapi/types/transformcontainer.go index 94e73ab281..926299d4fb 100644 --- a/typedapi/types/transformcontainer.go +++ b/typedapi/types/transformcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TransformContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Transform.ts#L27-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Transform.ts#L27-L34 type TransformContainer struct { Chain []TransformContainer `json:"chain,omitempty"` Script *ScriptTransform `json:"script,omitempty"` diff --git a/typedapi/types/transformdestination.go b/typedapi/types/transformdestination.go index 589e008d1f..81be74c0b1 100644 --- a/typedapi/types/transformdestination.go +++ b/typedapi/types/transformdestination.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L34-L45 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L34-L45 type TransformDestination struct { // Index The destination index for the transform. 
The mappings of the destination // index are deduced based on the source @@ -59,13 +60,13 @@ func (s *TransformDestination) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "pipeline": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/transformindexerstats.go b/typedapi/types/transformindexerstats.go index 12d577ef7c..f556a5bf24 100644 --- a/typedapi/types/transformindexerstats.go +++ b/typedapi/types/transformindexerstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformIndexerStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/types.ts#L56-L74 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/types.ts#L56-L74 type TransformIndexerStats struct { DeleteTimeInMs *int64 `json:"delete_time_in_ms,omitempty"` DocumentsDeleted *int64 `json:"documents_deleted,omitempty"` @@ -68,7 +69,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case "delete_time_in_ms": if err := dec.Decode(&s.DeleteTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "DeleteTimeInMs", err) } case "documents_deleted": @@ -78,7 +79,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsDeleted", err) } s.DocumentsDeleted = &value case float64: @@ -93,7 +94,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsIndexed", err) } s.DocumentsIndexed = value case float64: @@ -108,7 +109,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsProcessed", err) } s.DocumentsProcessed = value case float64: @@ -118,7 +119,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case "exponential_avg_checkpoint_duration_ms": if err := dec.Decode(&s.ExponentialAvgCheckpointDurationMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ExponentialAvgCheckpointDurationMs", err) } case "exponential_avg_documents_indexed": @@ -128,7 +129,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ExponentialAvgDocumentsIndexed", err) } f := Float64(value) s.ExponentialAvgDocumentsIndexed = f @@ -144,7 +145,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", 
"ExponentialAvgDocumentsProcessed", err) } f := Float64(value) s.ExponentialAvgDocumentsProcessed = f @@ -160,7 +161,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexFailures", err) } s.IndexFailures = value case float64: @@ -170,7 +171,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case "index_time_in_ms": if err := dec.Decode(&s.IndexTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTimeInMs", err) } case "index_total": @@ -180,7 +181,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTotal", err) } s.IndexTotal = value case float64: @@ -195,7 +196,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PagesProcessed", err) } s.PagesProcessed = value case float64: @@ -205,7 +206,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case "processing_time_in_ms": if err := dec.Decode(&s.ProcessingTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTimeInMs", err) } case "processing_total": @@ -215,7 +216,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTotal", err) } s.ProcessingTotal = value case float64: @@ -230,7 +231,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFailures", err) } s.SearchFailures = value case float64: @@ -240,7 +241,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case "search_time_in_ms": if err := dec.Decode(&s.SearchTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTimeInMs", err) } case "search_total": @@ -250,7 +251,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTotal", err) } s.SearchTotal = value case float64: @@ -265,7 +266,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TriggerCount", err) } s.TriggerCount = value case float64: diff --git a/typedapi/types/transformprogress.go b/typedapi/types/transformprogress.go index a9c7cb66e9..fdb6d40a36 100644 --- a/typedapi/types/transformprogress.go +++ b/typedapi/types/transformprogress.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformProgress type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/types.ts#L48-L54 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/types.ts#L48-L54 type TransformProgress struct { DocsIndexed int64 `json:"docs_indexed"` DocsProcessed int64 `json:"docs_processed"` @@ -61,7 +62,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocsIndexed", err) } s.DocsIndexed = value case float64: @@ -76,7 +77,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocsProcessed", err) } s.DocsProcessed = value case float64: @@ -91,7 +92,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocsRemaining", err) } s.DocsRemaining = value case float64: @@ -106,7 +107,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PercentComplete", err) } f := Float64(value) s.PercentComplete = f @@ -122,7 +123,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalDocs", err) } s.TotalDocs = value case float64: diff --git a/typedapi/types/transformsource.go b/typedapi/types/transformsource.go index e35629060b..a325bafc74 100644 --- a/typedapi/types/transformsource.go +++ b/typedapi/types/transformsource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TransformSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/_types/Transform.ts#L146-L165 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/_types/Transform.ts#L146-L165 type TransformSource struct { // Index The source indices for the transform. 
It can be a single index, an index // pattern (for example, `"my-index-*""`), an @@ -69,24 +70,24 @@ func (s *TransformSource) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = append(s.Index, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { - return err + return fmt.Errorf("%s | %w", "RuntimeMappings", err) } } diff --git a/typedapi/types/transformsrecord.go b/typedapi/types/transformsrecord.go index 764745b9c5..0b7203cf85 100644 --- a/typedapi/types/transformsrecord.go +++ b/typedapi/types/transformsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cat/transforms/types.ts#L22-L197 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cat/transforms/types.ts#L22-L197 type TransformsRecord struct { // ChangesLastDetectionTime The timestamp when changes were last detected in the source indices. 
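The `index` handling in the TransformSource hunk above also shows the convention for fields that may arrive as either a single string or an array of strings: the raw value is sniffed with `bytes.HasPrefix(rawMsg, []byte("["))` and normalized into a slice, and both branches now get the same field-name wrapping. A standalone sketch of that idea, using a hypothetical `indices` type rather than anything from the library:

```go
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// indices is an illustrative stand-in for a generated type whose Index
// field accepts either "a-string" or ["a", "b"] on the wire.
type indices struct {
	Index []string `json:"index"`
}

func (s *indices) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "index":
			rawMsg := json.RawMessage{}
			if err := dec.Decode(&rawMsg); err != nil {
				return fmt.Errorf("%s | %w", "Index", err)
			}
			if !bytes.HasPrefix(rawMsg, []byte("[")) {
				// A bare string becomes a one-element slice.
				o := new(string)
				if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(o); err != nil {
					return fmt.Errorf("%s | %w", "Index", err)
				}
				s.Index = append(s.Index, *o)
			} else {
				// A JSON array is decoded directly into the slice.
				if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil {
					return fmt.Errorf("%s | %w", "Index", err)
				}
			}
		}
	}
	return nil
}

func main() {
	var a, b indices
	_ = json.Unmarshal([]byte(`{"index": "my-index"}`), &a)
	_ = json.Unmarshal([]byte(`{"index": ["idx-1", "idx-2"]}`), &b)
	fmt.Println(a.Index, b.Index) // [my-index] [idx-1 idx-2]
}
```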
ChangesLastDetectionTime string `json:"changes_last_detection_time,omitempty"` @@ -142,7 +143,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "changes_last_detection_time", "cldt": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ChangesLastDetectionTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -154,7 +155,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "checkpoint", "c": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Checkpoint", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -166,7 +167,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "checkpoint_duration_time_exp_avg", "cdtea", "checkpointTimeExpAvg": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckpointDurationTimeExpAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -178,7 +179,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "checkpoint_progress", "cp", "checkpointProgress": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckpointProgress", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -190,7 +191,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "create_time", "ct", "createTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -202,7 +203,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "delete_time", "dtime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DeleteTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -214,7 +215,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "description", "d": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -226,7 +227,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "dest_index", "di", "destIndex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DestIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -238,7 +239,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "docs_per_second", "dps": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocsPerSecond", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -250,7 +251,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "documents_deleted", "docd": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsDeleted", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -262,7 +263,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "documents_indexed", "doci": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsIndexed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -274,7 +275,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "documents_processed", "docp", 
"documentsProcessed": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "DocumentsProcessed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -286,7 +287,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "frequency", "f": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -297,13 +298,13 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "index_failure", "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexFailure", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -315,7 +316,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "index_time", "itime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -327,7 +328,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "index_total", "it": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -339,7 +340,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "indexed_documents_exp_avg", "idea": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexedDocumentsExpAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -351,7 +352,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "last_search_time", "lst", "lastSearchTime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LastSearchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -363,7 +364,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "max_page_search_size", "mpsz": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxPageSearchSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -375,7 +376,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "pages_processed", "pp": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "PagesProcessed", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -387,7 +388,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "pipeline", "p": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -399,7 +400,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "processed_documents_exp_avg", "pdea": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessedDocumentsExpAvg", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -411,7 +412,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "processing_time", "pt": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ProcessingTime", err) } o := string(tmp[:]) 
o, err = strconv.Unquote(o) @@ -423,7 +424,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "reason", "r": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -435,7 +436,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "search_failure", "sf": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchFailure", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -447,7 +448,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "search_time", "stime": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTime", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -459,7 +460,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "search_total", "st": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchTotal", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -471,7 +472,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "source_index", "si", "sourceIndex": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "SourceIndex", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -483,7 +484,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "state", "s": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -495,7 +496,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "transform_type", "tt": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TransformType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -507,7 +508,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "trigger_count", "tc": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TriggerCount", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -518,7 +519,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { case "version", "v": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/transformstats.go b/typedapi/types/transformstats.go index 7d82acfc93..fd78bbca27 100644 --- a/typedapi/types/transformstats.go +++ b/typedapi/types/transformstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/types.ts#L31-L42 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/types.ts#L31-L42 type TransformStats struct { Checkpointing Checkpointing `json:"checkpointing"` Health *TransformStatsHealth `json:"health,omitempty"` @@ -58,28 +59,28 @@ func (s *TransformStats) UnmarshalJSON(data []byte) error { case "checkpointing": if err := dec.Decode(&s.Checkpointing); err != nil { - return err + return fmt.Errorf("%s | %w", "Checkpointing", err) } case "health": if err := dec.Decode(&s.Health); err != nil { - return err + return fmt.Errorf("%s | %w", "Health", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "node": if err := dec.Decode(&s.Node); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } case "reason": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,7 +92,7 @@ func (s *TransformStats) UnmarshalJSON(data []byte) error { case "state": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -102,7 +103,7 @@ func (s *TransformStats) UnmarshalJSON(data []byte) error { case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } } diff --git a/typedapi/types/transformstatshealth.go b/typedapi/types/transformstatshealth.go index 03ad45e3c1..a975e8245d 100644 --- a/typedapi/types/transformstatshealth.go +++ b/typedapi/types/transformstatshealth.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // TransformStatsHealth type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform_stats/types.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform_stats/types.ts#L44-L46 type TransformStatsHealth struct { Status healthstatus.HealthStatus `json:"status"` } diff --git a/typedapi/types/transformsummary.go b/typedapi/types/transformsummary.go index faaf715f85..fee6816fe6 100644 --- a/typedapi/types/transformsummary.go +++ b/typedapi/types/transformsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransformSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/transform/get_transform/types.ts#L33-L61 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/transform/get_transform/types.ts#L33-L61 type TransformSummary struct { // Authorization The security privileges that the transform uses to run its queries. If // Elastic Stack security features were disabled at the time of the most recent @@ -77,18 +78,18 @@ func (s *TransformSummary) UnmarshalJSON(data []byte) error { case "authorization": if err := dec.Decode(&s.Authorization); err != nil { - return err + return fmt.Errorf("%s | %w", "Authorization", err) } case "create_time": if err := dec.Decode(&s.CreateTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CreateTime", err) } case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -99,57 +100,57 @@ func (s *TransformSummary) UnmarshalJSON(data []byte) error { case "dest": if err := dec.Decode(&s.Dest); err != nil { - return err + return fmt.Errorf("%s | %w", "Dest", err) } case "frequency": if err := dec.Decode(&s.Frequency); err != nil { - return err + return fmt.Errorf("%s | %w", "Frequency", err) } case "id": if err := dec.Decode(&s.Id); err != nil { - return err + return fmt.Errorf("%s | %w", "Id", err) } case "latest": if err := dec.Decode(&s.Latest); err != nil { - return err + return fmt.Errorf("%s | %w", "Latest", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "pivot": if err := dec.Decode(&s.Pivot); err != nil { - return err + return fmt.Errorf("%s | %w", "Pivot", err) } case "retention_policy": if err := dec.Decode(&s.RetentionPolicy); err != nil { - return err + return fmt.Errorf("%s | %w", "RetentionPolicy", err) } case "settings": if err := dec.Decode(&s.Settings); err != nil { - return err + return fmt.Errorf("%s | %w", "Settings", err) } case "source": if err := dec.Decode(&s.Source); err != nil { - return err + return fmt.Errorf("%s | %w", "Source", err) } case "sync": if err := dec.Decode(&s.Sync); err != nil { - return err + return fmt.Errorf("%s | %w", "Sync", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/transientmetadataconfig.go b/typedapi/types/transientmetadataconfig.go deleted file mode 100644 index 1a2ee42a5e..0000000000 --- a/typedapi/types/transientmetadataconfig.go +++ /dev/null @@ -1,77 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 - -package types - -import ( - "bytes" - "encoding/json" - "errors" - "io" - "strconv" -) - -// TransientMetadataConfig type. -// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/TransientMetadataConfig.ts#L20-L22 -type TransientMetadataConfig struct { - Enabled bool `json:"enabled"` -} - -func (s *TransientMetadataConfig) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "enabled": - var tmp interface{} - dec.Decode(&tmp) - switch v := tmp.(type) { - case string: - value, err := strconv.ParseBool(v) - if err != nil { - return err - } - s.Enabled = value - case bool: - s.Enabled = v - } - - } - } - return nil -} - -// NewTransientMetadataConfig returns a TransientMetadataConfig. -func NewTransientMetadataConfig() *TransientMetadataConfig { - r := &TransientMetadataConfig{} - - return r -} diff --git a/typedapi/types/translog.go b/typedapi/types/translog.go index ca5d52d2f5..23924a3200 100644 --- a/typedapi/types/translog.go +++ b/typedapi/types/translog.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/translogdurability" @@ -31,7 +32,7 @@ import ( // Translog type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L335-L357 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L339-L361 type Translog struct { // Durability Whether or not to `fsync` and commit the translog after every index, delete, // update, or bulk request. @@ -71,22 +72,22 @@ func (s *Translog) UnmarshalJSON(data []byte) error { case "durability": if err := dec.Decode(&s.Durability); err != nil { - return err + return fmt.Errorf("%s | %w", "Durability", err) } case "flush_threshold_size": if err := dec.Decode(&s.FlushThresholdSize); err != nil { - return err + return fmt.Errorf("%s | %w", "FlushThresholdSize", err) } case "retention": if err := dec.Decode(&s.Retention); err != nil { - return err + return fmt.Errorf("%s | %w", "Retention", err) } case "sync_interval": if err := dec.Decode(&s.SyncInterval); err != nil { - return err + return fmt.Errorf("%s | %w", "SyncInterval", err) } } diff --git a/typedapi/types/translogretention.go b/typedapi/types/translogretention.go index 00faecd0e8..d8960de701 100644 --- a/typedapi/types/translogretention.go +++ b/typedapi/types/translogretention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TranslogRetention type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/_types/IndexSettings.ts#L376-L395 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/_types/IndexSettings.ts#L380-L399 type TranslogRetention struct { // Age This controls the maximum duration for which translog files are kept by each // shard. Keeping more @@ -70,12 +71,12 @@ func (s *TranslogRetention) UnmarshalJSON(data []byte) error { case "age": if err := dec.Decode(&s.Age); err != nil { - return err + return fmt.Errorf("%s | %w", "Age", err) } case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } } diff --git a/typedapi/types/translogstats.go b/typedapi/types/translogstats.go index 51d0685e91..5306f3558a 100644 --- a/typedapi/types/translogstats.go +++ b/typedapi/types/translogstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TranslogStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L397-L405 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L397-L405 type TranslogStats struct { EarliestLastModifiedAge int64 `json:"earliest_last_modified_age"` Operations int64 `json:"operations"` @@ -63,7 +64,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "EarliestLastModifiedAge", err) } s.EarliestLastModifiedAge = value case float64: @@ -78,7 +79,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Operations", err) } s.Operations = value case float64: @@ -89,7 +90,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case "size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SizeInBytes", err) } s.SizeInBytes = value case float64: @@ -121,7 +122,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "UncommittedOperations", err) } s.UncommittedOperations = value case float64: @@ -132,7 +133,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case 
"uncommitted_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "UncommittedSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -148,7 +149,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "UncommittedSizeInBytes", err) } s.UncommittedSizeInBytes = value case float64: diff --git a/typedapi/types/translogstatus.go b/typedapi/types/translogstatus.go index a43f3ae310..f0f4896235 100644 --- a/typedapi/types/translogstatus.go +++ b/typedapi/types/translogstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TranslogStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L102-L109 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L102-L109 type TranslogStatus struct { Percent Percentage `json:"percent"` Recovered int64 `json:"recovered"` @@ -57,7 +58,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { case "percent": if err := dec.Decode(&s.Percent); err != nil { - return err + return fmt.Errorf("%s | %w", "Percent", err) } case "recovered": @@ -67,7 +68,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Recovered", err) } s.Recovered = value case float64: @@ -82,7 +83,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -97,7 +98,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalOnStart", err) } s.TotalOnStart = value case float64: @@ -107,12 +108,12 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/transport.go b/typedapi/types/transport.go index 0e42a72bb4..cce4a9ee9f 100644 --- a/typedapi/types/transport.go +++ b/typedapi/types/transport.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Transport type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L1047-L1090 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L1047-L1090 type Transport struct { // InboundHandlingTimeHistogram The distribution of the time spent handling each inbound message on a // transport thread, represented as a histogram. @@ -84,12 +85,12 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case "inbound_handling_time_histogram": if err := dec.Decode(&s.InboundHandlingTimeHistogram); err != nil { - return err + return fmt.Errorf("%s | %w", "InboundHandlingTimeHistogram", err) } case "outbound_handling_time_histogram": if err := dec.Decode(&s.OutboundHandlingTimeHistogram); err != nil { - return err + return fmt.Errorf("%s | %w", "OutboundHandlingTimeHistogram", err) } case "rx_count": @@ -99,7 +100,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RxCount", err) } s.RxCount = &value case float64: @@ -110,7 +111,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case "rx_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RxSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -126,7 +127,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "RxSizeInBytes", err) } s.RxSizeInBytes = &value case float64: @@ -142,7 +143,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ServerOpen", err) } s.ServerOpen = &value case float64: @@ -157,7 +158,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TotalOutboundConnections", err) } s.TotalOutboundConnections = &value case float64: @@ -172,7 +173,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TxCount", err) } s.TxCount = &value case float64: @@ -183,7 +184,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case "tx_size": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TxSize", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -199,7 +200,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "TxSizeInBytes", err) } s.TxSizeInBytes = &value case float64: diff --git a/typedapi/types/transporthistogram.go b/typedapi/types/transporthistogram.go index 1089ef8c97..3db78e560d 100644 --- a/typedapi/types/transporthistogram.go +++ b/typedapi/types/transporthistogram.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TransportHistogram type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/nodes/_types/Stats.ts#L1092-L1106 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/nodes/_types/Stats.ts#L1092-L1106 type TransportHistogram struct { // Count The number of times a transport thread took a period of time within the // bounds of this bucket to handle an inbound message. @@ -65,7 +66,7 @@ func (s *TransportHistogram) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -80,7 +81,7 @@ func (s *TransportHistogram) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "GeMillis", err) } s.GeMillis = &value case float64: @@ -95,7 +96,7 @@ func (s *TransportHistogram) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LtMillis", err) } s.LtMillis = &value case float64: diff --git a/typedapi/types/triggercontainer.go b/typedapi/types/triggercontainer.go index ebc2206109..17ed7b6c76 100644 --- a/typedapi/types/triggercontainer.go +++ b/typedapi/types/triggercontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TriggerContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Trigger.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Trigger.ts#L23-L28 type TriggerContainer struct { Schedule *ScheduleContainer `json:"schedule,omitempty"` } diff --git a/typedapi/types/triggereventcontainer.go b/typedapi/types/triggereventcontainer.go index 05ed7d9404..c861cfeab0 100644 --- a/typedapi/types/triggereventcontainer.go +++ b/typedapi/types/triggereventcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TriggerEventContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Trigger.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Trigger.ts#L32-L37 type TriggerEventContainer struct { Schedule *ScheduleTriggerEvent `json:"schedule,omitempty"` } diff --git a/typedapi/types/triggereventresult.go b/typedapi/types/triggereventresult.go index 0e6ad763f1..79a8298329 100644 --- a/typedapi/types/triggereventresult.go +++ b/typedapi/types/triggereventresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TriggerEventResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Trigger.ts#L39-L43 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Trigger.ts#L39-L43 type TriggerEventResult struct { Manual TriggerEventContainer `json:"manual"` TriggeredTime DateTime `json:"triggered_time"` @@ -54,18 +55,18 @@ func (s *TriggerEventResult) UnmarshalJSON(data []byte) error { case "manual": if err := dec.Decode(&s.Manual); err != nil { - return err + return fmt.Errorf("%s | %w", "Manual", err) } case "triggered_time": if err := dec.Decode(&s.TriggeredTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TriggeredTime", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/trimprocessor.go b/typedapi/types/trimprocessor.go index fbc9e6e559..1bce28f42a 100644 --- a/typedapi/types/trimprocessor.go +++ b/typedapi/types/trimprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TrimProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1120-L1136 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1120-L1136 type TrimProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -72,7 +73,7 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,13 +84,13 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -119,7 +120,7 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -128,13 +129,13 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/trimtokenfilter.go b/typedapi/types/trimtokenfilter.go index 43a0f1f975..df7a2c7eb8 100644 --- a/typedapi/types/trimtokenfilter.go +++ b/typedapi/types/trimtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // TrimTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L326-L328 type TrimTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *TrimTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/truncatetokenfilter.go b/typedapi/types/truncatetokenfilter.go index 3873d85ca0..3ce60ba323 100644 --- a/typedapi/types/truncatetokenfilter.go +++ b/typedapi/types/truncatetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
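Free-form string fields such as "description", "if", and "tag" above are read by first capturing the raw token and then stripping the JSON quoting with strconv.Unquote. A small sketch of that decode step, kept separate from the generated types and using a hypothetical field name:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

// Sketch only: read a single JSON value the way the generated unmarshalers
// read loose string fields -- grab the raw message, then unquote it.
func decodeLooseString(data []byte) (string, error) {
	dec := json.NewDecoder(bytes.NewReader(data))
	var tmp json.RawMessage
	if err := dec.Decode(&tmp); err != nil {
		return "", fmt.Errorf("%s | %w", "Description", err)
	}
	o := string(tmp)
	if unq, err := strconv.Unquote(o); err == nil {
		o = unq // quoted JSON string -> bare Go string
	}
	return o, nil
}

func main() {
	s, _ := decodeLooseString([]byte(`"trim the user_agent field"`))
	fmt.Println(s) // trim the user_agent field
}
```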
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TruncateTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L330-L333 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L330-L333 type TruncateTokenFilter struct { Length *int `json:"length,omitempty"` Type string `json:"type,omitempty"` @@ -60,7 +61,7 @@ func (s *TruncateTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Length", err) } s.Length = &value case float64: @@ -70,12 +71,12 @@ func (s *TruncateTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/ttestaggregate.go b/typedapi/types/ttestaggregate.go index 2c536d6bf9..5b522d67cc 100644 --- a/typedapi/types/ttestaggregate.go +++ b/typedapi/types/ttestaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TTestAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L735-L739 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L735-L739 type TTestAggregate struct { Meta Metadata `json:"meta,omitempty"` Value Float64 `json:"value,omitempty"` @@ -54,18 +55,18 @@ func (s *TTestAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/ttestaggregation.go b/typedapi/types/ttestaggregation.go index 8fa5ddbdcf..bdc51a833d 100644 --- a/typedapi/types/ttestaggregation.go +++ b/typedapi/types/ttestaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TTestAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L294-L308 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L294-L308 type TTestAggregation struct { // A Test population A. A *TestPopulation `json:"a,omitempty"` @@ -61,23 +62,23 @@ func (s *TTestAggregation) UnmarshalJSON(data []byte) error { case "a": if err := dec.Decode(&s.A); err != nil { - return err + return fmt.Errorf("%s | %w", "A", err) } case "b": if err := dec.Decode(&s.B); err != nil { - return err + return fmt.Errorf("%s | %w", "B", err) } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *TTestAggregation) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/typefieldmappings.go b/typedapi/types/typefieldmappings.go index 20ed8e27b3..5211edf2e9 100644 --- a/typedapi/types/typefieldmappings.go +++ b/typedapi/types/typefieldmappings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // TypeFieldMappings type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/get_field_mapping/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/get_field_mapping/types.ts#L24-L26 type TypeFieldMappings struct { Mappings map[string]FieldMapping `json:"mappings"` } diff --git a/typedapi/types/typemapping.go b/typedapi/types/typemapping.go index 8a018870c6..502f3b6531 100644 --- a/typedapi/types/typemapping.go +++ b/typedapi/types/typemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // TypeMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/TypeMapping.ts#L34-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/TypeMapping.ts#L34-L57 type TypeMapping struct { AllField *AllField `json:"all_field,omitempty"` DataStreamTimestamp_ *DataStreamTimestamp `json:"_data_stream_timestamp,omitempty"` @@ -50,6 +51,7 @@ type TypeMapping struct { Runtime map[string]RuntimeField `json:"runtime,omitempty"` Size_ *SizeField `json:"_size,omitempty"` Source_ *SourceField `json:"_source,omitempty"` + Subobjects *bool `json:"subobjects,omitempty"` } func (s *TypeMapping) UnmarshalJSON(data []byte) error { @@ -69,12 +71,12 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case "all_field": if err := dec.Decode(&s.AllField); err != nil { - return err + return fmt.Errorf("%s | %w", "AllField", err) } case "_data_stream_timestamp": if err := dec.Decode(&s.DataStreamTimestamp_); err != nil { - return err + return fmt.Errorf("%s | %w", "DataStreamTimestamp_", err) } case "date_detection": @@ -84,7 +86,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DateDetection", err) } s.DateDetection = &value case bool: @@ -93,17 +95,17 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "dynamic_date_formats": if err := dec.Decode(&s.DynamicDateFormats); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicDateFormats", err) } case "dynamic_templates": if err := dec.Decode(&s.DynamicTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicTemplates", err) } case "enabled": @@ -113,7 +115,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -122,17 +124,17 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case "_field_names": if err := dec.Decode(&s.FieldNames_); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldNames_", err) } case "index_field": if err := dec.Decode(&s.IndexField); err != nil { - return err + return fmt.Errorf("%s | %w", "IndexField", err) } case "_meta": if err := dec.Decode(&s.Meta_); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta_", err) } case "numeric_detection": @@ -142,7 +144,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "NumericDetection", err) } s.NumericDetection = &value case bool: @@ -458,7 +460,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { case "_routing": if err := dec.Decode(&s.Routing_); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing_", err) } case "runtime": @@ -466,17 +468,31 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { s.Runtime = make(map[string]RuntimeField, 0) } if err := dec.Decode(&s.Runtime); err != nil { - return err + return fmt.Errorf("%s | %w", "Runtime", err) } case "_size": if err := dec.Decode(&s.Size_); err != nil { - return err + return fmt.Errorf("%s | %w", "Size_", err) } case "_source": if err := dec.Decode(&s.Source_); err 
!= nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) + } + + case "subobjects": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Subobjects", err) + } + s.Subobjects = &value + case bool: + s.Subobjects = &v } } diff --git a/typedapi/types/typequery.go b/typedapi/types/typequery.go index 8cad5ba652..16aa7831fc 100644 --- a/typedapi/types/typequery.go +++ b/typedapi/types/typequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // TypeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L264-L266 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L264-L266 type TypeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -64,7 +65,7 @@ func (s *TypeQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -76,7 +77,7 @@ func (s *TypeQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -88,7 +89,7 @@ func (s *TypeQuery) UnmarshalJSON(data []byte) error { case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/uaxemailurltokenizer.go b/typedapi/types/uaxemailurltokenizer.go index 676952d008..4840b8ba4d 100644 --- a/typedapi/types/uaxemailurltokenizer.go +++ b/typedapi/types/uaxemailurltokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UaxEmailUrlTokenizer type. 
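The newly tracked `Subobjects *bool` on TypeMapping above is decoded with the same tolerant switch used for the other boolean options: a plain JSON boolean is taken as-is, while a quoted "true"/"false" goes through strconv.ParseBool. A standalone sketch of that tolerance, with a hypothetical miniature mapping struct:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// Hypothetical mini-mapping, used only to demonstrate the bool-or-string decode.
type miniMapping struct {
	Subobjects *bool
}

func (m *miniMapping) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["subobjects"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return fmt.Errorf("%s | %w", "Subobjects", err)
		}
		switch v := tmp.(type) {
		case string: // e.g. "false" sent as a quoted string
			b, err := strconv.ParseBool(v)
			if err != nil {
				return fmt.Errorf("%s | %w", "Subobjects", err)
			}
			m.Subobjects = &b
		case bool: // plain JSON boolean
			m.Subobjects = &v
		}
	}
	return nil
}

func main() {
	var a, b miniMapping
	_ = json.Unmarshal([]byte(`{"subobjects": false}`), &a)
	_ = json.Unmarshal([]byte(`{"subobjects": "false"}`), &b)
	fmt.Println(*a.Subobjects, *b.Subobjects) // false false
}
```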
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L110-L113 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L110-L113 type UaxEmailUrlTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` @@ -60,7 +61,7 @@ func (s *UaxEmailUrlTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenLength", err) } s.MaxTokenLength = &value case float64: @@ -70,12 +71,12 @@ func (s *UaxEmailUrlTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/unassignedinformation.go b/typedapi/types/unassignedinformation.go index d3aefb1d35..e8bef4346d 100644 --- a/typedapi/types/unassignedinformation.go +++ b/typedapi/types/unassignedinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // UnassignedInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/cluster/allocation_explain/types.ts#L117-L125 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/cluster/allocation_explain/types.ts#L117-L125 type UnassignedInformation struct { AllocationStatus *string `json:"allocation_status,omitempty"` At DateTime `json:"at"` @@ -61,7 +62,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case "allocation_status": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "AllocationStatus", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -72,7 +73,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case "at": if err := dec.Decode(&s.At); err != nil { - return err + return fmt.Errorf("%s | %w", "At", err) } case "delayed": @@ -82,7 +83,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Delayed", err) } s.Delayed = &value case bool: @@ -92,7 +93,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case "details": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Details", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +110,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "FailedAllocationAttempts", err) } s.FailedAllocationAttempts = &value case float64: @@ -120,7 +121,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data 
[]byte) error { case "last_allocation_status": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LastAllocationStatus", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -131,7 +132,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case "reason": if err := dec.Decode(&s.Reason); err != nil { - return err + return fmt.Errorf("%s | %w", "Reason", err) } } diff --git a/typedapi/types/uniquetokenfilter.go b/typedapi/types/uniquetokenfilter.go index 378a735e21..f2082e5b5a 100644 --- a/typedapi/types/uniquetokenfilter.go +++ b/typedapi/types/uniquetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UniqueTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L335-L338 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L335-L338 type UniqueTokenFilter struct { OnlyOnSamePosition *bool `json:"only_on_same_position,omitempty"` Type string `json:"type,omitempty"` @@ -59,7 +60,7 @@ func (s *UniqueTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "OnlyOnSamePosition", err) } s.OnlyOnSamePosition = &value case bool: @@ -68,12 +69,12 @@ func (s *UniqueTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/unmappedraretermsaggregate.go b/typedapi/types/unmappedraretermsaggregate.go index dd1d73d3fa..a843803875 100644 --- a/typedapi/types/unmappedraretermsaggregate.go +++ b/typedapi/types/unmappedraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // UnmappedRareTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L453-L459 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L453-L459 type UnmappedRareTermsAggregate struct { Buckets BucketsVoid `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *UnmappedRareTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]interface{}, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []interface{}{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/unmappedsampleraggregate.go b/typedapi/types/unmappedsampleraggregate.go index b2b8a9127e..dfb692b429 100644 --- a/typedapi/types/unmappedsampleraggregate.go +++ b/typedapi/types/unmappedsampleraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // UnmappedSamplerAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L501-L502 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L501-L502 type UnmappedSamplerAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -61,7 +61,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -71,7 +71,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } default: @@ -88,490 +88,490 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", 
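The unmapped aggregates above decode their `Buckets BucketsVoid` field by peeking at the first byte of the raw payload: `{` means keyed buckets (decoded into a map), `[` means anonymous buckets (decoded into a slice). An illustrative sketch of that shape check, not tied to the generated types:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Decode a "buckets" value that may be either an object (keyed buckets)
// or an array (anonymous buckets), in the spirit of the BucketsVoid
// handling above. Illustrative only.
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	switch firstByte(raw) {
	case '{':
		o := make(map[string]interface{})
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	case '[':
		var o []interface{}
		if err := json.Unmarshal(raw, &o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	}
	return nil, fmt.Errorf("Buckets | unexpected JSON shape")
}

func firstByte(raw []byte) byte {
	trimmed := bytes.TrimLeft(raw, " \t\r\n")
	if len(trimmed) == 0 {
		return 0
	}
	return trimmed[0]
}

func main() {
	keyed, _ := decodeBuckets([]byte(`{"a":{"doc_count":1}}`))
	anon, _ := decodeBuckets([]byte(`[{"doc_count":1},{"doc_count":2}]`))
	fmt.Println(keyed, anon)
}
```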
"Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) 
} s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": 
o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -581,7 +581,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/unmappedsignificanttermsaggregate.go b/typedapi/types/unmappedsignificanttermsaggregate.go index e7ab75307e..8ffe8a41c3 100644 --- a/typedapi/types/unmappedsignificanttermsaggregate.go +++ b/typedapi/types/unmappedsignificanttermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UnmappedSignificantTermsAggregate type. 
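The long switch above dispatches sub-aggregations by the type prefix on each response key (Elasticsearch's typed_keys convention, `type#name`): the prefix selects which concrete aggregate to decode into, the suffix becomes the map key, and unknown prefixes fall back to a generic map. A miniature sketch of that dispatch with only two hypothetical stand-in types:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Minimal stand-ins for two concrete aggregates; the real client has dozens
// (sterms, lterms, umterms, ...), as the generated switch shows.
type maxAggregate struct {
	Value float64 `json:"value"`
}
type stringTermsAggregate struct {
	Buckets []map[string]interface{} `json:"buckets"`
}

// Sketch of the typed_keys dispatch: split "type#name", pick the concrete
// type from the prefix, store the result under the bare name.
func decodeAggregations(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{}, len(raw))
	for key, msg := range raw {
		elems := strings.SplitN(key, "#", 2)
		if len(elems) != 2 {
			continue // untyped key; the generated code handles this case separately
		}
		switch elems[0] {
		case "max":
			o := &maxAggregate{}
			if err := json.Unmarshal(msg, o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		case "sterms":
			o := &stringTermsAggregate{}
			if err := json.Unmarshal(msg, o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		default:
			o := map[string]interface{}{}
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Aggregations", err)
			}
			out[elems[1]] = o
		}
	}
	return out, nil
}

func main() {
	raw := map[string]json.RawMessage{
		"max#top_price": json.RawMessage(`{"value": 200.0}`),
		"sterms#by_tag": json.RawMessage(`{"buckets":[{"key":"go","doc_count":3}]}`),
	}
	aggs, _ := decodeAggregations(raw)
	fmt.Printf("%+v\n", aggs["top_price"])
}
```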
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L610-L616 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L610-L616 type UnmappedSignificantTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsVoid `json:"buckets"` @@ -60,7 +61,7 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "BgCount", err) } s.BgCount = &value case float64: @@ -78,13 +79,13 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]interface{}, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []interface{}{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -96,7 +97,7 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = &value case float64: @@ -106,7 +107,7 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/unmappedtermsaggregate.go b/typedapi/types/unmappedtermsaggregate.go index c1e8faf682..6019264ee0 100644 --- a/typedapi/types/unmappedtermsaggregate.go +++ b/typedapi/types/unmappedtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UnmappedTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L423-L429 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L423-L429 type UnmappedTermsAggregate struct { Buckets BucketsVoid `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -63,13 +64,13 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]interface{}, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []interface{}{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } @@ -81,7 +82,7 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } s.DocCountErrorUpperBound = &value case float64: @@ -91,7 +92,7 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "sum_other_doc_count": @@ -101,7 +102,7 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "SumOtherDocCount", err) } s.SumOtherDocCount = &value case float64: diff --git a/typedapi/types/unrateddocument.go b/typedapi/types/unrateddocument.go index c0de63e1a2..8f2e9f5901 100644 --- a/typedapi/types/unrateddocument.go +++ b/typedapi/types/unrateddocument.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // UnratedDocument type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/rank_eval/types.ts#L147-L150 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/rank_eval/types.ts#L147-L150 type UnratedDocument struct { Id_ string `json:"_id"` Index_ string `json:"_index"` @@ -52,12 +53,12 @@ func (s *UnratedDocument) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } } diff --git a/typedapi/types/unsignedlongnumberproperty.go b/typedapi/types/unsignedlongnumberproperty.go index fba0798787..8da4ab41f9 100644 --- a/typedapi/types/unsignedlongnumberproperty.go +++ b/typedapi/types/unsignedlongnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -34,7 +35,7 @@ import ( // UnsignedLongNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L169-L172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L169-L172 type UnsignedLongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -84,7 +85,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Coerce", err) } s.Coerce = &value case bool: @@ -113,13 +114,13 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -130,7 +131,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -139,7 +140,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -457,7 +458,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -472,7 +473,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMalformed", err) } s.IgnoreMalformed = &value case bool: @@ -486,7 +487,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } s.Index = &value case bool: @@ -498,17 +499,17 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": if err := dec.Decode(&s.NullValue); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { - return err + return fmt.Errorf("%s | %w", "OnScriptError", err) } case "properties": @@ 
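Fields like `copy_to` above accept either a single field name or an array of names; the generated code checks whether the raw message starts with `[` and either appends the lone string or decodes the whole slice. A small standalone sketch of that single-value-or-array tolerance:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Sketch of the copy_to handling: a bare string becomes a one-element
// slice, an array decodes directly. Illustrative only.
func decodeCopyTo(rawMsg json.RawMessage) ([]string, error) {
	var copyTo []string
	if !bytes.HasPrefix(rawMsg, []byte("[")) {
		o := new(string)
		if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(o); err != nil {
			return nil, fmt.Errorf("%s | %w", "CopyTo", err)
		}
		return append(copyTo, *o), nil
	}
	if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&copyTo); err != nil {
		return nil, fmt.Errorf("%s | %w", "CopyTo", err)
	}
	return copyTo, nil
}

func main() {
	one, _ := decodeCopyTo([]byte(`"full_name"`))
	many, _ := decodeCopyTo([]byte(`["full_name","all_text"]`))
	fmt.Println(one, many) // [full_name] [full_name all_text]
}
```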
-821,7 +822,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -830,7 +831,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -839,7 +840,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -847,7 +848,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -857,7 +858,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -873,7 +874,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -887,7 +888,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesDimension", err) } s.TimeSeriesDimension = &value case bool: @@ -896,12 +897,12 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case "time_series_metric": if err := dec.Decode(&s.TimeSeriesMetric); err != nil { - return err + return fmt.Errorf("%s | %w", "TimeSeriesMetric", err) } case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/updateaction.go b/typedapi/types/updateaction.go index c62210e0dc..fc75ce466e 100644 --- a/typedapi/types/updateaction.go +++ b/typedapi/types/updateaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UpdateAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L169-L205 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L169-L205 type UpdateAction struct { // DetectNoop Set to false to disable setting 'result' in the response // to 'noop' if no change to the document occurred. 
@@ -75,7 +76,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DetectNoop", err) } s.DetectNoop = &value case bool: @@ -84,7 +85,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { case "doc": if err := dec.Decode(&s.Doc); err != nil { - return err + return fmt.Errorf("%s | %w", "Doc", err) } case "doc_as_upsert": @@ -94,7 +95,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocAsUpsert", err) } s.DocAsUpsert = &value case bool: @@ -104,7 +105,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -113,7 +114,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -122,7 +123,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -130,7 +131,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -144,7 +145,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ScriptedUpsert", err) } s.ScriptedUpsert = &value case bool: @@ -153,12 +154,12 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { case "_source": if err := dec.Decode(&s.Source_); err != nil { - return err + return fmt.Errorf("%s | %w", "Source_", err) } case "upsert": if err := dec.Decode(&s.Upsert); err != nil { - return err + return fmt.Errorf("%s | %w", "Upsert", err) } } diff --git a/typedapi/types/updatebyqueryrethrottlenode.go b/typedapi/types/updatebyqueryrethrottlenode.go index 7ac60ece83..35f3056c97 100644 --- a/typedapi/types/updatebyqueryrethrottlenode.go +++ b/typedapi/types/updatebyqueryrethrottlenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" @@ -31,7 +32,7 @@ import ( // UpdateByQueryRethrottleNode type. 
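The "script" field above is a union: it can be an inline script object or a stored-script reference, and the unmarshaler scans the raw object's tokens until it hits a discriminating key before choosing which concrete type (InlineScript or StoredScriptId) to decode. The exact discriminator keys are elided in this hunk, so the sketch below assumes "source" marks an inline script and "id" a stored one, with simplified hypothetical types:

```go
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// Hypothetical, simplified script variants standing in for the client's
// InlineScript and StoredScriptId.
type inlineScript struct {
	Source string `json:"source"`
}
type storedScriptID struct {
	ID string `json:"id"`
}

// Sketch of the union decode: walk the tokens of the raw object and let the
// first recognised key ("source" / "id" -- assumed discriminators) pick the
// concrete type to decode the whole message into.
func decodeScript(message json.RawMessage) (interface{}, error) {
	keyDec := json.NewDecoder(bytes.NewReader(message))
	for {
		t, err := keyDec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return nil, fmt.Errorf("%s | %w", "Script", err)
		}
		switch t {
		case "source":
			o := &inlineScript{}
			if err := json.Unmarshal(message, o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Script", err)
			}
			return o, nil
		case "id":
			o := &storedScriptID{}
			if err := json.Unmarshal(message, o); err != nil {
				return nil, fmt.Errorf("%s | %w", "Script", err)
			}
			return o, nil
		}
	}
	return nil, nil
}

func main() {
	s, _ := decodeScript([]byte(`{"source":"ctx._source.counter += params.n"}`))
	fmt.Printf("%#v\n", s)
}
```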
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleNode.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleNode.ts#L25-L27 type UpdateByQueryRethrottleNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` @@ -62,27 +63,27 @@ func (s *UpdateByQueryRethrottleNode) UnmarshalJSON(data []byte) error { s.Attributes = make(map[string]string, 0) } if err := dec.Decode(&s.Attributes); err != nil { - return err + return fmt.Errorf("%s | %w", "Attributes", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "ip": if err := dec.Decode(&s.Ip); err != nil { - return err + return fmt.Errorf("%s | %w", "Ip", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "tasks": @@ -90,12 +91,12 @@ func (s *UpdateByQueryRethrottleNode) UnmarshalJSON(data []byte) error { s.Tasks = make(map[string]TaskInfo, 0) } if err := dec.Decode(&s.Tasks); err != nil { - return err + return fmt.Errorf("%s | %w", "Tasks", err) } case "transport_address": if err := dec.Decode(&s.TransportAddress); err != nil { - return err + return fmt.Errorf("%s | %w", "TransportAddress", err) } } diff --git a/typedapi/types/updateoperation.go b/typedapi/types/updateoperation.go index 9669bafa28..5733fbdcd3 100644 --- a/typedapi/types/updateoperation.go +++ b/typedapi/types/updateoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // UpdateOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L136-L143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L136-L143 type UpdateOperation struct { // Id_ The document ID. 
Id_ *string `json:"_id,omitempty"` @@ -66,7 +67,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "if_primary_term": @@ -76,7 +77,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IfPrimaryTerm", err) } s.IfPrimaryTerm = &value case float64: @@ -86,12 +87,12 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case "if_seq_no": if err := dec.Decode(&s.IfSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "IfSeqNo", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "require_alias": @@ -101,7 +102,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequireAlias", err) } s.RequireAlias = &value case bool: @@ -116,7 +117,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RetryOnConflict", err) } s.RetryOnConflict = &value case float64: @@ -126,17 +127,17 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/uppercaseprocessor.go b/typedapi/types/uppercaseprocessor.go index 78e4415d97..1971c4683b 100644 --- a/typedapi/types/uppercaseprocessor.go +++ b/typedapi/types/uppercaseprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UppercaseProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1138-L1154 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1138-L1154 type UppercaseProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
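Aside on the recurring json.RawMessage/strconv.Unquote idiom visible for fields such as description, if and tag: the raw token is unquoted when it is a JSON string and kept verbatim otherwise. A small illustrative sketch, not the generated code:

package main

import (
    "encoding/json"
    "fmt"
    "strconv"
)

// decodeLooseString mimics the generated fallback: unquote if possible, else keep the raw text.
func decodeLooseString(tmp json.RawMessage) string {
    o := string(tmp)
    if unquoted, err := strconv.Unquote(o); err == nil {
        return unquoted
    }
    return o
}

func main() {
    fmt.Println(decodeLooseString(json.RawMessage(`"uppercase the field"`))) // uppercase the field
    fmt.Println(decodeLooseString(json.RawMessage(`42`)))                    // 42, kept verbatim
}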
@@ -72,7 +73,7 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,13 +84,13 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -119,7 +120,7 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -128,13 +129,13 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/uppercasetokenfilter.go b/typedapi/types/uppercasetokenfilter.go index 9828223f54..07369daec2 100644 --- a/typedapi/types/uppercasetokenfilter.go +++ b/typedapi/types/uppercasetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // UppercaseTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L340-L342 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L340-L342 type UppercaseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *UppercaseTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/urldecodeprocessor.go b/typedapi/types/urldecodeprocessor.go index dd65fedb37..09923c244b 100644 --- a/typedapi/types/urldecodeprocessor.go +++ b/typedapi/types/urldecodeprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UrlDecodeProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L1156-L1172 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L1156-L1172 type UrlDecodeProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -72,7 +73,7 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,13 +84,13 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -105,7 +106,7 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -119,7 +120,7 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -128,13 +129,13 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -145,7 +146,7 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/usagestatsindex.go b/typedapi/types/usagestatsindex.go index daae008ce7..ea7d741f59 100644 --- a/typedapi/types/usagestatsindex.go +++ b/typedapi/types/usagestatsindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // UsageStatsIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L38-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L38-L40 type UsageStatsIndex struct { Shards []UsageStatsShards `json:"shards"` } diff --git a/typedapi/types/usagestatsshards.go b/typedapi/types/usagestatsshards.go index 985effcf98..93a98594ce 100644 --- a/typedapi/types/usagestatsshards.go +++ b/typedapi/types/usagestatsshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UsageStatsShards type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L42-L47 type UsageStatsShards struct { Routing ShardRouting `json:"routing"` Stats IndicesShardsStats `json:"stats"` @@ -55,18 +56,18 @@ func (s *UsageStatsShards) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "stats": if err := dec.Decode(&s.Stats); err != nil { - return err + return fmt.Errorf("%s | %w", "Stats", err) } case "tracking_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackingId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -77,7 +78,7 @@ func (s *UsageStatsShards) UnmarshalJSON(data []byte) error { case "tracking_started_at_millis": if err := dec.Decode(&s.TrackingStartedAtMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TrackingStartedAtMillis", err) } } diff --git a/typedapi/types/user.go b/typedapi/types/user.go index 51648081dc..129d84adb3 100644 --- a/typedapi/types/user.go +++ b/typedapi/types/user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // User type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/User.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/User.ts#L23-L31 type User struct { Email string `json:"email,omitempty"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *User) UnmarshalJSON(data []byte) error { case "email": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Email", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,7 +76,7 @@ func (s *User) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -84,27 +85,27 @@ func (s *User) UnmarshalJSON(data []byte) error { case "full_name": if err := dec.Decode(&s.FullName); err != nil { - return err + return fmt.Errorf("%s | %w", "FullName", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "profile_uid": if err := dec.Decode(&s.ProfileUid); err != nil { - return err + return fmt.Errorf("%s | %w", "ProfileUid", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/useragentprocessor.go b/typedapi/types/useragentprocessor.go index f72dd7d757..d31c50eefb 100644 --- a/typedapi/types/useragentprocessor.go +++ b/typedapi/types/useragentprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // UserAgentProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ingest/_types/Processors.ts#L370-L390 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ingest/_types/Processors.ts#L370-L390 type UserAgentProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -80,7 +81,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,13 +92,13 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "if": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "If", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -113,7 +114,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreFailure", err) } s.IgnoreFailure = &value case bool: @@ -127,7 +128,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreMissing", err) } s.IgnoreMissing = &value case bool: @@ -136,18 +137,18 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { - return err + return fmt.Errorf("%s | %w", "OnFailure", err) } case "options": if err := dec.Decode(&s.Options); err != nil { - return err + return fmt.Errorf("%s | %w", "Options", err) } case "regex_file": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "RegexFile", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -159,7 +160,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case "tag": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Tag", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -170,7 +171,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { case "target_field": if err := dec.Decode(&s.TargetField); err != nil { - return err + return fmt.Errorf("%s | %w", "TargetField", err) } } diff --git a/typedapi/types/userindicesprivileges.go b/typedapi/types/userindicesprivileges.go index d327f53e0b..0e69c32891 100644 --- a/typedapi/types/userindicesprivileges.go +++ b/typedapi/types/userindicesprivileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // UserIndicesPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/Privileges.ts#L107-L129 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/Privileges.ts#L107-L129 type UserIndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. 
Implicitly, restricted indices have limited @@ -77,7 +78,7 @@ func (s *UserIndicesPrivileges) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AllowRestrictedIndices", err) } s.AllowRestrictedIndices = value case bool: @@ -86,7 +87,7 @@ func (s *UserIndicesPrivileges) UnmarshalJSON(data []byte) error { case "field_security": if err := dec.Decode(&s.FieldSecurity); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldSecurity", err) } case "names": @@ -95,24 +96,24 @@ func (s *UserIndicesPrivileges) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } s.Names = append(s.Names, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Names); err != nil { - return err + return fmt.Errorf("%s | %w", "Names", err) } } case "privileges": if err := dec.Decode(&s.Privileges); err != nil { - return err + return fmt.Errorf("%s | %w", "Privileges", err) } case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } } diff --git a/typedapi/types/userprofile.go b/typedapi/types/userprofile.go index f6e19feef5..faff97dacf 100644 --- a/typedapi/types/userprofile.go +++ b/typedapi/types/userprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UserProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/UserProfile.ts#L42-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/UserProfile.ts#L42-L48 type UserProfile struct { Data map[string]json.RawMessage `json:"data"` Enabled *bool `json:"enabled,omitempty"` @@ -59,7 +60,7 @@ func (s *UserProfile) UnmarshalJSON(data []byte) error { s.Data = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Data); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } case "enabled": @@ -69,7 +70,7 @@ func (s *UserProfile) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -81,17 +82,17 @@ func (s *UserProfile) UnmarshalJSON(data []byte) error { s.Labels = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Labels); err != nil { - return err + return fmt.Errorf("%s | %w", "Labels", err) } case "uid": if err := dec.Decode(&s.Uid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uid", err) } case "user": if err := dec.Decode(&s.User); err != nil { - return err + return fmt.Errorf("%s | %w", "User", err) } } diff --git a/typedapi/types/userprofilehitmetadata.go b/typedapi/types/userprofilehitmetadata.go index b60f432b09..05b91c784e 100644 --- a/typedapi/types/userprofilehitmetadata.go +++ b/typedapi/types/userprofilehitmetadata.go @@ -16,7 +16,7 @@ // under the License. 
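Aside on the names handling in UserIndicesPrivileges above: index patterns may arrive as a single string or as an array, so the first byte of the raw message decides the decode target. A simplified, assumed sketch of that union (function and field names are illustrative):

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

// decodeStringOrSlice accepts either "logs-*" or ["logs-*", ...] and always returns a slice.
func decodeStringOrSlice(raw json.RawMessage) ([]string, error) {
    if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
        var one string
        if err := json.Unmarshal(raw, &one); err != nil {
            return nil, fmt.Errorf("%s | %w", "Names", err)
        }
        return []string{one}, nil
    }
    var many []string
    if err := json.Unmarshal(raw, &many); err != nil {
        return nil, fmt.Errorf("%s | %w", "Names", err)
    }
    return many, nil
}

func main() {
    a, _ := decodeStringOrSlice(json.RawMessage(`"logs-*"`))
    b, _ := decodeStringOrSlice(json.RawMessage(`["logs-*","metrics-*"]`))
    fmt.Println(a, b) // [logs-*] [logs-* metrics-*]
}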
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UserProfileHitMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/UserProfile.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/UserProfile.ts#L28-L31 type UserProfileHitMetadata struct { PrimaryTerm_ int64 `json:"_primary_term"` SeqNo_ int64 `json:"_seq_no"` @@ -58,7 +59,7 @@ func (s *UserProfileHitMetadata) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "PrimaryTerm_", err) } s.PrimaryTerm_ = value case float64: @@ -68,7 +69,7 @@ func (s *UserProfileHitMetadata) UnmarshalJSON(data []byte) error { case "_seq_no": if err := dec.Decode(&s.SeqNo_); err != nil { - return err + return fmt.Errorf("%s | %w", "SeqNo_", err) } } diff --git a/typedapi/types/userprofileuser.go b/typedapi/types/userprofileuser.go index 0d98e55882..b930b2345c 100644 --- a/typedapi/types/userprofileuser.go +++ b/typedapi/types/userprofileuser.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UserProfileUser type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/UserProfile.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/UserProfile.ts#L33-L40 type UserProfileUser struct { Email string `json:"email,omitempty"` FullName string `json:"full_name,omitempty"` @@ -58,7 +59,7 @@ func (s *UserProfileUser) UnmarshalJSON(data []byte) error { case "email": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Email", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -69,27 +70,27 @@ func (s *UserProfileUser) UnmarshalJSON(data []byte) error { case "full_name": if err := dec.Decode(&s.FullName); err != nil { - return err + return fmt.Errorf("%s | %w", "FullName", err) } case "realm_domain": if err := dec.Decode(&s.RealmDomain); err != nil { - return err + return fmt.Errorf("%s | %w", "RealmDomain", err) } case "realm_name": if err := dec.Decode(&s.RealmName); err != nil { - return err + return fmt.Errorf("%s | %w", "RealmName", err) } case "roles": if err := dec.Decode(&s.Roles); err != nil { - return err + return fmt.Errorf("%s | %w", "Roles", err) } case "username": if err := dec.Decode(&s.Username); err != nil { - return err + return fmt.Errorf("%s | %w", "Username", err) } } diff --git a/typedapi/types/userprofilewithmetadata.go b/typedapi/types/userprofilewithmetadata.go index d3450e166c..e4037ee5e6 100644 --- a/typedapi/types/userprofilewithmetadata.go +++ b/typedapi/types/userprofilewithmetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UserProfileWithMetadata type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/_types/UserProfile.ts#L50-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/_types/UserProfile.ts#L50-L53 type UserProfileWithMetadata struct { Data map[string]json.RawMessage `json:"data"` Doc_ UserProfileHitMetadata `json:"_doc"` @@ -61,12 +62,12 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { s.Data = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Data); err != nil { - return err + return fmt.Errorf("%s | %w", "Data", err) } case "_doc": if err := dec.Decode(&s.Doc_); err != nil { - return err + return fmt.Errorf("%s | %w", "Doc_", err) } case "enabled": @@ -76,7 +77,7 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = &value case bool: @@ -88,7 +89,7 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { s.Labels = make(map[string]json.RawMessage, 0) } if err := dec.Decode(&s.Labels); err != nil { - return err + return fmt.Errorf("%s | %w", "Labels", err) } case "last_synchronized": @@ -98,7 +99,7 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "LastSynchronized", err) } s.LastSynchronized = value case float64: @@ -108,12 +109,12 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { case "uid": if err := dec.Decode(&s.Uid); err != nil { - return err + return fmt.Errorf("%s | %w", "Uid", err) } case "user": if err := dec.Decode(&s.User); err != nil { - return err + return fmt.Errorf("%s | %w", "User", err) } } diff --git a/typedapi/types/userrealm.go b/typedapi/types/userrealm.go index c5552ec189..f7398c9d44 100644 --- a/typedapi/types/userrealm.go +++ b/typedapi/types/userrealm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // UserRealm type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/security/get_token/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/security/get_token/types.ts#L30-L33 type UserRealm struct { Name string `json:"name"` Type string `json:"type"` @@ -53,13 +54,13 @@ func (s *UserRealm) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/validationloss.go b/typedapi/types/validationloss.go index 2a7b8928e1..4c85c0c6eb 100644 --- a/typedapi/types/validationloss.go +++ b/typedapi/types/validationloss.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ValidationLoss type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/DataframeAnalytics.ts#L570-L575 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/DataframeAnalytics.ts#L570-L575 type ValidationLoss struct { // FoldValues Validation loss values for every added decision tree during the forest // growing procedure. @@ -56,13 +57,13 @@ func (s *ValidationLoss) UnmarshalJSON(data []byte) error { case "fold_values": if err := dec.Decode(&s.FoldValues); err != nil { - return err + return fmt.Errorf("%s | %w", "FoldValues", err) } case "loss_type": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "LossType", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/valuecountaggregate.go b/typedapi/types/valuecountaggregate.go index ecec2cde04..efe10c507e 100644 --- a/typedapi/types/valuecountaggregate.go +++ b/typedapi/types/valuecountaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ValueCountAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L218-L222 type ValueCountAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *ValueCountAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/valuecountaggregation.go b/typedapi/types/valuecountaggregation.go index 82dd82e364..604afc2768 100644 --- a/typedapi/types/valuecountaggregation.go +++ b/typedapi/types/valuecountaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ValueCountAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L417-L417 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L417-L417 type ValueCountAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -58,13 +59,13 @@ func (s *ValueCountAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -75,13 +76,13 @@ func (s *ValueCountAggregation) UnmarshalJSON(data []byte) error { case "missing": if err := dec.Decode(&s.Missing); err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -90,7 +91,7 @@ func (s *ValueCountAggregation) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -99,7 +100,7 @@ func (s *ValueCountAggregation) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -107,7 +108,7 @@ func (s *ValueCountAggregation) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/variablewidthhistogramaggregate.go b/typedapi/types/variablewidthhistogramaggregate.go index 920e5e02ac..ca2dcc8ead 100644 --- a/typedapi/types/variablewidthhistogramaggregate.go +++ b/typedapi/types/variablewidthhistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // VariableWidthHistogramAggregate type. 
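Aside on the script handling in ValueCountAggregation above: the raw object's keys decide whether it is decoded as an inline script or as a stored-script reference. A rough sketch of that union with simplified stand-in types (not the typedapi InlineScript/StoredScriptId):

package main

import (
    "encoding/json"
    "fmt"
)

type inlineScript struct {
    Source string `json:"source"`
}

type storedScriptID struct {
    ID string `json:"id"`
}

// decodeScript inspects the keys of the raw object: "id" selects the stored form,
// anything else falls back to the inline form. Simplified relative to the generated token scan.
func decodeScript(raw json.RawMessage) (interface{}, error) {
    var keys map[string]json.RawMessage
    if err := json.Unmarshal(raw, &keys); err != nil {
        return nil, fmt.Errorf("%s | %w", "Script", err)
    }
    if _, ok := keys["id"]; ok {
        var s storedScriptID
        if err := json.Unmarshal(raw, &s); err != nil {
            return nil, fmt.Errorf("%s | %w", "Script", err)
        }
        return s, nil
    }
    var s inlineScript
    if err := json.Unmarshal(raw, &s); err != nil {
        return nil, fmt.Errorf("%s | %w", "Script", err)
    }
    return s, nil
}

func main() {
    a, _ := decodeScript(json.RawMessage(`{"source":"doc['price'].value * 2"}`))
    b, _ := decodeScript(json.RawMessage(`{"id":"my-stored-script"}`))
    fmt.Printf("%T %T\n", a, b) // main.inlineScript main.storedScriptID
}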
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L362-L364 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L362-L364 type VariableWidthHistogramAggregate struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -60,20 +61,20 @@ func (s *VariableWidthHistogramAggregate) UnmarshalJSON(data []byte) error { case '{': o := make(map[string]VariableWidthHistogramBucket, 0) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': o := []VariableWidthHistogramBucket{} if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o } case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } } diff --git a/typedapi/types/variablewidthhistogramaggregation.go b/typedapi/types/variablewidthhistogramaggregation.go index 98a135544d..bd1426df00 100644 --- a/typedapi/types/variablewidthhistogramaggregation.go +++ b/typedapi/types/variablewidthhistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // VariableWidthHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/bucket.ts#L1015-L1035 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/bucket.ts#L1015-L1035 type VariableWidthHistogramAggregation struct { // Buckets The target number of buckets. Buckets *int `json:"buckets,omitempty"` @@ -69,7 +70,7 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = &value case float64: @@ -79,7 +80,7 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "initial_buffer": @@ -90,7 +91,7 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "InitialBuffer", err) } s.InitialBuffer = &value case float64: @@ -106,7 +107,7 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardSize", err) } s.ShardSize = &value case float64: diff --git a/typedapi/types/variablewidthhistogrambucket.go b/typedapi/types/variablewidthhistogrambucket.go index 9936220d62..936b6f879d 100644 --- a/typedapi/types/variablewidthhistogrambucket.go +++ b/typedapi/types/variablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. 
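Aside on the buckets handling in VariableWidthHistogramAggregate above: aggregation responses may return buckets either as a keyed object or as an array, so the first JSON token picks the target. A minimal sketch under that assumption; bucket is a placeholder type, not the generated VariableWidthHistogramBucket:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

type bucket struct {
    DocCount int64 `json:"doc_count"`
}

// decodeBuckets returns either a map[string]bucket (keyed response) or a []bucket (array response).
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
    switch trimmed := bytes.TrimSpace(raw); {
    case bytes.HasPrefix(trimmed, []byte("{")):
        m := map[string]bucket{}
        if err := json.Unmarshal(trimmed, &m); err != nil {
            return nil, fmt.Errorf("%s | %w", "Buckets", err)
        }
        return m, nil
    case bytes.HasPrefix(trimmed, []byte("[")):
        var s []bucket
        if err := json.Unmarshal(trimmed, &s); err != nil {
            return nil, fmt.Errorf("%s | %w", "Buckets", err)
        }
        return s, nil
    default:
        return nil, fmt.Errorf("%s | unexpected JSON for buckets", "Buckets")
    }
}

func main() {
    a, _ := decodeBuckets(json.RawMessage(`[{"doc_count":3}]`))
    b, _ := decodeBuckets(json.RawMessage(`{"first":{"doc_count":3}}`))
    fmt.Printf("%T %T\n", a, b) // []main.bucket map[string]main.bucket
}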
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -32,7 +32,7 @@ import ( // VariableWidthHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L366-L373 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L366-L373 type VariableWidthHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -66,7 +66,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocCount", err) } s.DocCount = value case float64: @@ -81,7 +81,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Key", err) } f := Float64(value) s.Key = f @@ -93,7 +93,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case "key_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "KeyAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -109,7 +109,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) s.Max = f @@ -121,7 +121,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case "max_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MaxAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -137,7 +137,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) s.Min = f @@ -149,7 +149,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case "min_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "MinAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -172,490 +172,490 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { case "cardinality": o := NewCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentiles": o := NewHdrPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "percentiles_bucket": o := NewPercentilesBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "min": o := NewMinAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "max": o := NewMaxAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sum": o := NewSumAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "avg": o := NewAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "weighted_avg": o := NewWeightedAvgAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "value_count": o := NewValueCountAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_value": o := NewSimpleValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "derivative": o := NewDerivativeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "bucket_metric_value": o := NewBucketMetricValueAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats": o := NewStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "stats_bucket": o := NewStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats": o := NewExtendedStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_bounds": o := NewGeoBoundsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_centroid": o := NewGeoCentroidAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "histogram": o := NewHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } 
s.Aggregations[elems[1]] = o case "date_histogram": o := NewDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "auto_date_histogram": o := NewAutoDateHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sterms": o := NewStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lterms": o := NewLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "dterms": o := NewDoubleTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umterms": o := NewUnmappedTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "lrareterms": o := NewLongRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "srareterms": o := NewStringRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umrareterms": o := NewUnmappedRareTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "multi_terms": o := NewMultiTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "missing": o := NewMissingAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "nested": o := NewNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "reverse_nested": o := NewReverseNestedAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "global": o := NewGlobalAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filter": o := NewFilterAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "children": o := NewChildrenAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "parent": o := NewParentAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sampler": o := NewSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"unmapped_sampler": o := NewUnmappedSamplerAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohash_grid": o := NewGeoHashGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geotile_grid": o := NewGeoTileGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geohex_grid": o := NewGeoHexGridAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "range": o := NewRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "date_range": o := NewDateRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_distance": o := NewGeoDistanceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_range": o := NewIpRangeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "ip_prefix": o := NewIpPrefixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "filters": o := NewFiltersAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "siglterms": o := NewSignificantLongTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "sigsterms": o := NewSignificantStringTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "umsigterms": o := NewUnmappedSignificantTermsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "composite": o := NewCompositeAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "frequent_item_sets": o := NewFrequentItemSetsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "scripted_metric": o := NewScriptedMetricAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_hits": o := NewTopHitsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "inference": o := NewInferenceAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case 
"string_stats": o := NewStringStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "box_plot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "top_metrics": o := NewTopMetricsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "t_test": o := NewTTestAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "rate": o := NewRateAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "simple_long_value": o := NewCumulativeCardinalityAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "matrix_stats": o := NewMatrixStatsAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o case "geo_line": o := NewGeoLineAggregate() if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[elems[1]] = o } @@ -665,7 +665,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { } else { o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Aggregations", err) } s.Aggregations[value] = o } diff --git a/typedapi/types/vector.go b/typedapi/types/vector.go index 7180efb831..661016c6f3 100644 --- a/typedapi/types/vector.go +++ b/typedapi/types/vector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Vector type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L454-L458 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L454-L458 type Vector struct { Available bool `json:"available"` DenseVectorDimsAvgCount int `json:"dense_vector_dims_avg_count"` @@ -61,7 +62,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -76,7 +77,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DenseVectorDimsAvgCount", err) } s.DenseVectorDimsAvgCount = value case float64: @@ -92,7 +93,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DenseVectorFieldsCount", err) } s.DenseVectorFieldsCount = value case float64: @@ -107,7 +108,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -122,7 +123,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SparseVectorFieldsCount", err) } s.SparseVectorFieldsCount = &value case float64: diff --git a/typedapi/types/verifyindex.go b/typedapi/types/verifyindex.go index 48f05b5baa..51c90ad763 100644 --- a/typedapi/types/verifyindex.go +++ b/typedapi/types/verifyindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // VerifyIndex type. 
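The Vector hunk above also shows the flexible scalar handling these decoders use: boolean and numeric fields are accepted either as native JSON values or as strings, and a parse failure now names the Go field. A standalone sketch of that behavior with a stand-in struct (the field names are illustrative, not the real Vector type):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// featureUsage is a stand-in mirroring the generated pattern: each field is
// read into an interface{}, string values are coerced with strconv, and
// failures are wrapped as "<FieldName> | <cause>".
type featureUsage struct {
	Available        bool `json:"available"`
	DenseVectorCount int  `json:"dense_vector_fields_count"`
}

func (s *featureUsage) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["available"]; ok {
		var v interface{}
		if err := json.Unmarshal(msg, &v); err != nil {
			return fmt.Errorf("%s | %w", "Available", err)
		}
		switch t := v.(type) {
		case string:
			b, err := strconv.ParseBool(t)
			if err != nil {
				return fmt.Errorf("%s | %w", "Available", err)
			}
			s.Available = b
		case bool:
			s.Available = t
		}
	}
	if msg, ok := raw["dense_vector_fields_count"]; ok {
		var v interface{}
		if err := json.Unmarshal(msg, &v); err != nil {
			return fmt.Errorf("%s | %w", "DenseVectorCount", err)
		}
		switch t := v.(type) {
		case string:
			n, err := strconv.Atoi(t)
			if err != nil {
				return fmt.Errorf("%s | %w", "DenseVectorCount", err)
			}
			s.DenseVectorCount = n
		case float64:
			s.DenseVectorCount = int(t)
		}
	}
	return nil
}

func main() {
	var u featureUsage
	fmt.Println(json.Unmarshal([]byte(`{"available":"true","dense_vector_fields_count":"7"}`), &u), u)
	// <nil> {true 7}

	fmt.Println(json.Unmarshal([]byte(`{"available":"maybe"}`), &u))
	// Available | strconv.ParseBool: parsing "maybe": invalid syntax
}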
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/indices/recovery/types.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/indices/recovery/types.ts#L111-L116 type VerifyIndex struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` @@ -54,22 +55,22 @@ func (s *VerifyIndex) UnmarshalJSON(data []byte) error { case "check_index_time": if err := dec.Decode(&s.CheckIndexTime); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckIndexTime", err) } case "check_index_time_in_millis": if err := dec.Decode(&s.CheckIndexTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "CheckIndexTimeInMillis", err) } case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/versionproperty.go b/typedapi/types/versionproperty.go index 31ff5a35c8..e9bc806727 100644 --- a/typedapi/types/versionproperty.go +++ b/typedapi/types/versionproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // VersionProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L272-L274 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L273-L275 type VersionProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -68,13 +69,13 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -85,7 +86,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -94,7 +95,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -412,7 +413,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -425,7 +426,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "properties": @@ -738,7 +739,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -754,7 +755,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -763,7 +764,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/vertex.go b/typedapi/types/vertex.go index ea6b68ac8e..9d36a2166f 100644 --- a/typedapi/types/vertex.go +++ b/typedapi/types/vertex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Vertex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/Vertex.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/Vertex.ts#L23-L28 type Vertex struct { Depth int64 `json:"depth"` Field string `json:"field"` @@ -60,7 +61,7 @@ func (s *Vertex) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Depth", err) } s.Depth = value case float64: @@ -70,13 +71,13 @@ func (s *Vertex) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "term": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Term", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *Vertex) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Weight", err) } f := Float64(value) s.Weight = f diff --git a/typedapi/types/vertexdefinition.go b/typedapi/types/vertexdefinition.go index 82b24f0840..26c01cf7f7 100644 --- a/typedapi/types/vertexdefinition.go +++ b/typedapi/types/vertexdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // VertexDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/Vertex.ts#L30-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/Vertex.ts#L30-L59 type VertexDefinition struct { // Exclude Prevents the specified terms from being included in the results. 
Exclude []string `json:"exclude,omitempty"` @@ -67,17 +68,17 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { case "exclude": if err := dec.Decode(&s.Exclude); err != nil { - return err + return fmt.Errorf("%s | %w", "Exclude", err) } case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "include": if err := dec.Decode(&s.Include); err != nil { - return err + return fmt.Errorf("%s | %w", "Include", err) } case "min_doc_count": @@ -87,7 +88,7 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "MinDocCount", err) } s.MinDocCount = &value case float64: @@ -102,7 +103,7 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "ShardMinDocCount", err) } s.ShardMinDocCount = &value case float64: @@ -118,7 +119,7 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: diff --git a/typedapi/types/vertexinclude.go b/typedapi/types/vertexinclude.go index 6620d0df42..b7956476d5 100644 --- a/typedapi/types/vertexinclude.go +++ b/typedapi/types/vertexinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // VertexInclude type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/graph/_types/Vertex.ts#L61-L64 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/graph/_types/Vertex.ts#L61-L64 type VertexInclude struct { Boost Float64 `json:"boost"` Term string `json:"term"` @@ -58,7 +59,7 @@ func (s *VertexInclude) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := Float64(value) s.Boost = f @@ -70,7 +71,7 @@ func (s *VertexInclude) UnmarshalJSON(data []byte) error { case "term": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Term", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/vocabulary.go b/typedapi/types/vocabulary.go index c50bd492f2..9abe0af292 100644 --- a/typedapi/types/vocabulary.go +++ b/typedapi/types/vocabulary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Vocabulary type. 
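The copy_to handling a few hunks back (VersionProperty, and again later for WildcardProperty) accepts either a single string or an array of strings by peeking at the raw bytes before unmarshaling. A standalone sketch with a stand-in property struct, not the real typedapi type:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// property is a stand-in with only the copy_to field.
type property struct {
	CopyTo []string `json:"copy_to,omitempty"`
}

func (s *property) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if rawMsg, ok := raw["copy_to"]; ok {
		// If the raw value does not start with '[', treat it as a single string.
		if !bytes.HasPrefix(rawMsg, []byte("[")) {
			o := new(string)
			if err := json.Unmarshal(rawMsg, o); err != nil {
				return fmt.Errorf("%s | %w", "CopyTo", err)
			}
			s.CopyTo = append(s.CopyTo, *o)
		} else {
			if err := json.Unmarshal(rawMsg, &s.CopyTo); err != nil {
				return fmt.Errorf("%s | %w", "CopyTo", err)
			}
		}
	}
	return nil
}

func main() {
	var a, b property
	_ = json.Unmarshal([]byte(`{"copy_to":"full_name"}`), &a)
	_ = json.Unmarshal([]byte(`{"copy_to":["full_name","all_text"]}`), &b)
	fmt.Println(a.CopyTo, b.CopyTo) // [full_name] [full_name all_text]
}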
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L233-L235 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L233-L235 type Vocabulary struct { Index string `json:"index"` } @@ -51,7 +52,7 @@ func (s *Vocabulary) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } } diff --git a/typedapi/types/waitforactiveshards.go b/typedapi/types/waitforactiveshards.go index dd82e47d63..da9cf031bc 100644 --- a/typedapi/types/waitforactiveshards.go +++ b/typedapi/types/waitforactiveshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -25,5 +25,5 @@ package types // int // waitforactiveshardoptions.WaitForActiveShardOptions // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/common.ts#L142-L143 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/common.ts#L142-L143 type WaitForActiveShards interface{} diff --git a/typedapi/types/warmerstats.go b/typedapi/types/warmerstats.go index a95c054030..483e4b1155 100644 --- a/typedapi/types/warmerstats.go +++ b/typedapi/types/warmerstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WarmerStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Stats.ts#L407-L412 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Stats.ts#L407-L412 type WarmerStats struct { Current int64 `json:"current"` Total int64 `json:"total"` @@ -60,7 +61,7 @@ func (s *WarmerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Current", err) } s.Current = value case float64: @@ -75,7 +76,7 @@ func (s *WarmerStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: @@ -85,12 +86,12 @@ func (s *WarmerStats) UnmarshalJSON(data []byte) error { case "total_time": if err := dec.Decode(&s.TotalTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTime", err) } case "total_time_in_millis": if err := dec.Decode(&s.TotalTimeInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMillis", err) } } diff --git a/typedapi/types/watch.go b/typedapi/types/watch.go index 3474fafb5b..a1b83266b2 100644 --- a/typedapi/types/watch.go +++ b/typedapi/types/watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // Watch type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Watch.ts#L37-L47 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Watch.ts#L37-L47 type Watch struct { Actions map[string]WatcherAction `json:"actions"` Condition WatcherCondition `json:"condition"` @@ -62,47 +63,47 @@ func (s *Watch) UnmarshalJSON(data []byte) error { s.Actions = make(map[string]WatcherAction, 0) } if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "condition": if err := dec.Decode(&s.Condition); err != nil { - return err + return fmt.Errorf("%s | %w", "Condition", err) } case "input": if err := dec.Decode(&s.Input); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "throttle_period": if err := dec.Decode(&s.ThrottlePeriod); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottlePeriod", err) } case "throttle_period_in_millis": if err := dec.Decode(&s.ThrottlePeriodInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottlePeriodInMillis", err) } case "transform": if err := dec.Decode(&s.Transform); err != nil { - return err + return fmt.Errorf("%s | %w", "Transform", err) } case "trigger": if err := dec.Decode(&s.Trigger); err != nil { - return err + return fmt.Errorf("%s | %w", "Trigger", err) } } diff --git a/typedapi/types/watcher.go b/typedapi/types/watcher.go index 76e251db54..0a195698d5 100644 --- a/typedapi/types/watcher.go +++ b/typedapi/types/watcher.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Watcher type. 
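The Watch hunk above wraps every nested dec.Decode the same way, so the field prefixes stack when an inner type also wraps its own fields, while %w keeps the root cause reachable. A standalone sketch with stand-in types (not the real Watch/WatcherCondition) showing how the chain composes and how errors.As still finds the underlying strconv error:

package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

// inner stands in for a nested type whose own UnmarshalJSON wraps failures.
type inner struct {
	Gte int64 `json:"gte"`
}

func (s *inner) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["gte"]; ok {
		var str string
		if err := json.Unmarshal(msg, &str); err != nil {
			return fmt.Errorf("%s | %w", "Gte", err)
		}
		v, err := strconv.ParseInt(str, 10, 64)
		if err != nil {
			return fmt.Errorf("%s | %w", "Gte", err)
		}
		s.Gte = v
	}
	return nil
}

// outer stands in for a container type such as Watch.
type outer struct {
	Condition inner `json:"condition"`
}

func (s *outer) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["condition"]; ok {
		if err := json.Unmarshal(msg, &s.Condition); err != nil {
			return fmt.Errorf("%s | %w", "Condition", err)
		}
	}
	return nil
}

func main() {
	var o outer
	err := json.Unmarshal([]byte(`{"condition":{"gte":"not-a-number"}}`), &o)
	fmt.Println(err)
	// Condition | Gte | strconv.ParseInt: parsing "not-a-number": invalid syntax

	var numErr *strconv.NumError
	fmt.Println(errors.As(err, &numErr), numErr.Func) // true ParseInt
}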
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L460-L464 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L460-L464 type Watcher struct { Available bool `json:"available"` Count Counter `json:"count"` @@ -61,7 +62,7 @@ func (s *Watcher) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -70,7 +71,7 @@ func (s *Watcher) UnmarshalJSON(data []byte) error { case "count": if err := dec.Decode(&s.Count); err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } case "enabled": @@ -80,7 +81,7 @@ func (s *Watcher) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -89,12 +90,12 @@ func (s *Watcher) UnmarshalJSON(data []byte) error { case "execution": if err := dec.Decode(&s.Execution); err != nil { - return err + return fmt.Errorf("%s | %w", "Execution", err) } case "watch": if err := dec.Decode(&s.Watch); err != nil { - return err + return fmt.Errorf("%s | %w", "Watch", err) } } diff --git a/typedapi/types/watcheraction.go b/typedapi/types/watcheraction.go index 7671f8c32c..26d96ae0b7 100644 --- a/typedapi/types/watcheraction.go +++ b/typedapi/types/watcheraction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // WatcherAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L41-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L41-L60 type WatcherAction struct { ActionType *actiontype.ActionType `json:"action_type,omitempty"` Condition *WatcherCondition `json:"condition,omitempty"` @@ -67,23 +68,23 @@ func (s *WatcherAction) UnmarshalJSON(data []byte) error { case "action_type": if err := dec.Decode(&s.ActionType); err != nil { - return err + return fmt.Errorf("%s | %w", "ActionType", err) } case "condition": if err := dec.Decode(&s.Condition); err != nil { - return err + return fmt.Errorf("%s | %w", "Condition", err) } case "email": if err := dec.Decode(&s.Email); err != nil { - return err + return fmt.Errorf("%s | %w", "Email", err) } case "foreach": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Foreach", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -94,12 +95,12 @@ func (s *WatcherAction) UnmarshalJSON(data []byte) error { case "index": if err := dec.Decode(&s.Index); err != nil { - return err + return fmt.Errorf("%s | %w", "Index", err) } case "logging": if err := dec.Decode(&s.Logging); err != nil { - return err + return fmt.Errorf("%s | %w", "Logging", err) } case "max_iterations": @@ -110,7 +111,7 @@ func (s *WatcherAction) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxIterations", err) } s.MaxIterations = &value case float64: @@ -120,37 +121,37 @@ func (s *WatcherAction) UnmarshalJSON(data []byte) error { case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "pagerduty": if err := dec.Decode(&s.Pagerduty); err != nil { - return err + return fmt.Errorf("%s | %w", "Pagerduty", err) } case "slack": if err := dec.Decode(&s.Slack); err != nil { - return err + return fmt.Errorf("%s | %w", "Slack", err) } case "throttle_period": if err := dec.Decode(&s.ThrottlePeriod); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottlePeriod", err) } case "throttle_period_in_millis": if err := dec.Decode(&s.ThrottlePeriodInMillis); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottlePeriodInMillis", err) } case "transform": if err := dec.Decode(&s.Transform); err != nil { - return err + return fmt.Errorf("%s | %w", "Transform", err) } case "webhook": if err := dec.Decode(&s.Webhook); err != nil { - return err + return fmt.Errorf("%s | %w", "Webhook", err) } } diff --git a/typedapi/types/watcheractions.go b/typedapi/types/watcheractions.go index 62e9fe0b3f..932ebaa1a9 100644 --- a/typedapi/types/watcheractions.go +++ b/typedapi/types/watcheractions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // WatcherActions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L396-L398 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L396-L398 type WatcherActions struct { Actions map[string]WatcherActionTotals `json:"actions"` } diff --git a/typedapi/types/watcheractiontotals.go b/typedapi/types/watcheractiontotals.go index 54afe9ca3a..9c52ce9f88 100644 --- a/typedapi/types/watcheractiontotals.go +++ b/typedapi/types/watcheractiontotals.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // WatcherActionTotals type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L412-L415 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L412-L415 type WatcherActionTotals struct { Total Duration `json:"total"` TotalTimeInMs int64 `json:"total_time_in_ms"` @@ -52,12 +53,12 @@ func (s *WatcherActionTotals) UnmarshalJSON(data []byte) error { case "total": if err := dec.Decode(&s.Total); err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } case "total_time_in_ms": if err := dec.Decode(&s.TotalTimeInMs); err != nil { - return err + return fmt.Errorf("%s | %w", "TotalTimeInMs", err) } } diff --git a/typedapi/types/watchercondition.go b/typedapi/types/watchercondition.go index 859ff2102c..0c0b8b93b2 100644 --- a/typedapi/types/watchercondition.go +++ b/typedapi/types/watchercondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // WatcherCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Conditions.ts#L47-L59 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Conditions.ts#L47-L59 type WatcherCondition struct { Always *AlwaysCondition `json:"always,omitempty"` ArrayCompare map[string]ArrayCompareCondition `json:"array_compare,omitempty"` diff --git a/typedapi/types/watcherinput.go b/typedapi/types/watcherinput.go index de4e44fb66..5e1753e80e 100644 --- a/typedapi/types/watcherinput.go +++ b/typedapi/types/watcherinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -26,7 +26,7 @@ import ( // WatcherInput type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Input.ts#L90-L98 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Input.ts#L90-L98 type WatcherInput struct { Chain *ChainInput `json:"chain,omitempty"` Http *HttpInput `json:"http,omitempty"` diff --git a/typedapi/types/watchernodestats.go b/typedapi/types/watchernodestats.go index 24f7201b77..9f8c2a8124 100644 --- a/typedapi/types/watchernodestats.go +++ b/typedapi/types/watchernodestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // WatcherNodeStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stats/types.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stats/types.ts#L33-L40 type WatcherNodeStats struct { CurrentWatches []WatchRecordStats `json:"current_watches,omitempty"` ExecutionThreadPool ExecutionThreadPool `json:"execution_thread_pool"` @@ -59,22 +60,22 @@ func (s *WatcherNodeStats) UnmarshalJSON(data []byte) error { case "current_watches": if err := dec.Decode(&s.CurrentWatches); err != nil { - return err + return fmt.Errorf("%s | %w", "CurrentWatches", err) } case "execution_thread_pool": if err := dec.Decode(&s.ExecutionThreadPool); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionThreadPool", err) } case "node_id": if err := dec.Decode(&s.NodeId); err != nil { - return err + return fmt.Errorf("%s | %w", "NodeId", err) } case "queued_watches": if err := dec.Decode(&s.QueuedWatches); err != nil { - return err + return fmt.Errorf("%s | %w", "QueuedWatches", err) } case "watch_count": @@ -84,7 +85,7 @@ func (s *WatcherNodeStats) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "WatchCount", err) } s.WatchCount = value case float64: @@ -94,7 +95,7 @@ func (s *WatcherNodeStats) UnmarshalJSON(data []byte) error { case "watcher_state": if err := dec.Decode(&s.WatcherState); err != nil { - return err + return fmt.Errorf("%s | %w", "WatcherState", err) } } diff --git a/typedapi/types/watcherstatusactions.go b/typedapi/types/watcherstatusactions.go index 9086122ffa..73a5f9d785 100644 --- a/typedapi/types/watcherstatusactions.go +++ b/typedapi/types/watcherstatusactions.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // WatcherStatusActions type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Action.ts#L62-L62 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Action.ts#L62-L62 type WatcherStatusActions map[string]ActionStatus diff --git a/typedapi/types/watcherwatch.go b/typedapi/types/watcherwatch.go index db5426cf41..6cebd160ad 100644 --- a/typedapi/types/watcherwatch.go +++ b/typedapi/types/watcherwatch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // WatcherWatch type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L400-L405 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L400-L405 type WatcherWatch struct { Action map[string]Counter `json:"action,omitempty"` Condition map[string]Counter `json:"condition,omitempty"` diff --git a/typedapi/types/watcherwatchtrigger.go b/typedapi/types/watcherwatchtrigger.go index 76f9f8a39c..4ce595d025 100644 --- a/typedapi/types/watcherwatchtrigger.go +++ b/typedapi/types/watcherwatchtrigger.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // WatcherWatchTrigger type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L407-L410 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L407-L410 type WatcherWatchTrigger struct { All_ Counter `json:"_all"` Schedule *WatcherWatchTriggerSchedule `json:"schedule,omitempty"` diff --git a/typedapi/types/watcherwatchtriggerschedule.go b/typedapi/types/watcherwatchtriggerschedule.go index 1132ec7118..12c4410c8c 100644 --- a/typedapi/types/watcherwatchtriggerschedule.go +++ b/typedapi/types/watcherwatchtriggerschedule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WatcherWatchTriggerSchedule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L466-L469 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L466-L469 type WatcherWatchTriggerSchedule struct { Active int64 `json:"active"` All_ Counter `json:"_all"` @@ -60,7 +61,7 @@ func (s *WatcherWatchTriggerSchedule) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Active", err) } s.Active = value case float64: @@ -70,12 +71,12 @@ func (s *WatcherWatchTriggerSchedule) UnmarshalJSON(data []byte) error { case "_all": if err := dec.Decode(&s.All_); err != nil { - return err + return fmt.Errorf("%s | %w", "All_", err) } case "cron": if err := dec.Decode(&s.Cron); err != nil { - return err + return fmt.Errorf("%s | %w", "Cron", err) } case "total": @@ -85,7 +86,7 @@ func (s *WatcherWatchTriggerSchedule) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = value case float64: diff --git a/typedapi/types/watchrecord.go b/typedapi/types/watchrecord.go index 9d6a2d606b..b697ba04dd 100644 --- a/typedapi/types/watchrecord.go +++ b/typedapi/types/watchrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // WatchRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/execute_watch/types.ts#L27-L39 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/execute_watch/types.ts#L27-L39 type WatchRecord struct { Condition WatcherCondition `json:"condition"` Input WatcherInput `json:"input"` @@ -64,28 +65,28 @@ func (s *WatchRecord) UnmarshalJSON(data []byte) error { case "condition": if err := dec.Decode(&s.Condition); err != nil { - return err + return fmt.Errorf("%s | %w", "Condition", err) } case "input": if err := dec.Decode(&s.Input); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } case "messages": if err := dec.Decode(&s.Messages); err != nil { - return err + return fmt.Errorf("%s | %w", "Messages", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "node": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Node", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -96,32 +97,32 @@ func (s *WatchRecord) UnmarshalJSON(data []byte) error { case "result": if err := dec.Decode(&s.Result); err != nil { - return err + return fmt.Errorf("%s | %w", "Result", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "status": if err := dec.Decode(&s.Status); err != nil { - return err + return fmt.Errorf("%s | %w", "Status", err) } case "trigger_event": if err := dec.Decode(&s.TriggerEvent); err != nil { - return err + return fmt.Errorf("%s | %w", "TriggerEvent", err) } case "user": if err := dec.Decode(&s.User); err != nil { - return err + return fmt.Errorf("%s | %w", "User", err) } case "watch_id": if err := dec.Decode(&s.WatchId); err != nil { - return err + return fmt.Errorf("%s | %w", "WatchId", err) } } diff --git a/typedapi/types/watchrecordqueuedstats.go b/typedapi/types/watchrecordqueuedstats.go index 888e00c944..f29cb25304 100644 --- a/typedapi/types/watchrecordqueuedstats.go +++ b/typedapi/types/watchrecordqueuedstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // WatchRecordQueuedStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stats/types.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stats/types.ts#L50-L52 type WatchRecordQueuedStats struct { ExecutionTime DateTime `json:"execution_time"` } @@ -51,7 +52,7 @@ func (s *WatchRecordQueuedStats) UnmarshalJSON(data []byte) error { case "execution_time": if err := dec.Decode(&s.ExecutionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionTime", err) } } diff --git a/typedapi/types/watchrecordstats.go b/typedapi/types/watchrecordstats.go index 9aa5fe23e6..6dbb0cdcc5 100644 --- a/typedapi/types/watchrecordstats.go +++ b/typedapi/types/watchrecordstats.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/executionphase" @@ -31,7 +32,7 @@ import ( // WatchRecordStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stats/types.ts#L54-L60 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stats/types.ts#L54-L60 type WatchRecordStats struct { ExecutedActions []string `json:"executed_actions,omitempty"` ExecutionPhase executionphase.ExecutionPhase `json:"execution_phase"` @@ -58,32 +59,32 @@ func (s *WatchRecordStats) UnmarshalJSON(data []byte) error { case "executed_actions": if err := dec.Decode(&s.ExecutedActions); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutedActions", err) } case "execution_phase": if err := dec.Decode(&s.ExecutionPhase); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionPhase", err) } case "execution_time": if err := dec.Decode(&s.ExecutionTime); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionTime", err) } case "triggered_time": if err := dec.Decode(&s.TriggeredTime); err != nil { - return err + return fmt.Errorf("%s | %w", "TriggeredTime", err) } case "watch_id": if err := dec.Decode(&s.WatchId); err != nil { - return err + return fmt.Errorf("%s | %w", "WatchId", err) } case "watch_record_id": if err := dec.Decode(&s.WatchRecordId); err != nil { - return err + return fmt.Errorf("%s | %w", "WatchRecordId", err) } } diff --git a/typedapi/types/watchstatus.go b/typedapi/types/watchstatus.go index 52310ce51b..423da25bc0 100644 --- a/typedapi/types/watchstatus.go +++ b/typedapi/types/watchstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WatchStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Watch.ts#L49-L56 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Watch.ts#L49-L56 type WatchStatus struct { Actions WatcherStatusActions `json:"actions"` ExecutionState *string `json:"execution_state,omitempty"` @@ -57,13 +58,13 @@ func (s *WatchStatus) UnmarshalJSON(data []byte) error { case "actions": if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "execution_state": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ExecutionState", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,22 +75,22 @@ func (s *WatchStatus) UnmarshalJSON(data []byte) error { case "last_checked": if err := dec.Decode(&s.LastChecked); err != nil { - return err + return fmt.Errorf("%s | %w", "LastChecked", err) } case "last_met_condition": if err := dec.Decode(&s.LastMetCondition); err != nil { - return err + return fmt.Errorf("%s | %w", "LastMetCondition", err) } case "state": if err := dec.Decode(&s.State); err != nil { - return err + return fmt.Errorf("%s | %w", "State", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/webhookaction.go b/typedapi/types/webhookaction.go index 27aa9a174a..0e04dd347a 100644 --- a/typedapi/types/webhookaction.go +++ b/typedapi/types/webhookaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -33,7 +34,7 @@ import ( // WebhookAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L293-L293 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L293-L293 type WebhookAction struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` @@ -67,13 +68,13 @@ func (s *WebhookAction) UnmarshalJSON(data []byte) error { case "auth": if err := dec.Decode(&s.Auth); err != nil { - return err + return fmt.Errorf("%s | %w", "Auth", err) } case "body": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Body", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -84,7 +85,7 @@ func (s *WebhookAction) UnmarshalJSON(data []byte) error { case "connection_timeout": if err := dec.Decode(&s.ConnectionTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ConnectionTimeout", err) } case "headers": @@ -92,17 +93,17 @@ func (s *WebhookAction) UnmarshalJSON(data []byte) error { s.Headers = make(map[string]string, 0) } if err := dec.Decode(&s.Headers); err != nil { - return err + return fmt.Errorf("%s | %w", "Headers", err) } case "host": if err := dec.Decode(&s.Host); err != nil { - return err + return fmt.Errorf("%s | %w", "Host", err) } case "method": if err := dec.Decode(&s.Method); err != nil { - return err + return fmt.Errorf("%s | %w", "Method", err) } case "params": @@ -110,13 +111,13 @@ func (s *WebhookAction) UnmarshalJSON(data []byte) error { s.Params = make(map[string]string, 0) } if err := dec.Decode(&s.Params); err != nil { - return err + return fmt.Errorf("%s | %w", "Params", err) } case "path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Path", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -127,28 +128,28 @@ func (s *WebhookAction) UnmarshalJSON(data []byte) error { case "port": if err := dec.Decode(&s.Port); err != nil { - return err + return fmt.Errorf("%s | %w", "Port", err) } case "proxy": if err := dec.Decode(&s.Proxy); err != nil { - return err + return fmt.Errorf("%s | %w", "Proxy", err) } case "read_timeout": if err := dec.Decode(&s.ReadTimeout); err != nil { - return err + return fmt.Errorf("%s | %w", "ReadTimeout", err) } case "scheme": if err := dec.Decode(&s.Scheme); err != nil { - return err + return fmt.Errorf("%s | %w", "Scheme", err) } case "url": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Url", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/webhookresult.go b/typedapi/types/webhookresult.go index dbc125089d..2f955b5d8e 100644 --- a/typedapi/types/webhookresult.go +++ b/typedapi/types/webhookresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // WebhookResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/_types/Actions.ts#L295-L298 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/_types/Actions.ts#L295-L298 type WebhookResult struct { Request HttpInputRequestResult `json:"request"` Response *HttpInputResponseResult `json:"response,omitempty"` diff --git a/typedapi/types/weightedaverageaggregation.go b/typedapi/types/weightedaverageaggregation.go index 30e6b14868..c0f0a78b0b 100644 --- a/typedapi/types/weightedaverageaggregation.go +++ b/typedapi/types/weightedaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // WeightedAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L432-L446 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L432-L446 type WeightedAverageAggregation struct { // Format A numeric response formatter. Format *string `json:"format,omitempty"` @@ -63,7 +64,7 @@ func (s *WeightedAverageAggregation) UnmarshalJSON(data []byte) error { case "format": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Format", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -74,13 +75,13 @@ func (s *WeightedAverageAggregation) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -91,17 +92,17 @@ func (s *WeightedAverageAggregation) UnmarshalJSON(data []byte) error { case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_type": if err := dec.Decode(&s.ValueType); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueType", err) } case "weight": if err := dec.Decode(&s.Weight); err != nil { - return err + return fmt.Errorf("%s | %w", "Weight", err) } } diff --git a/typedapi/types/weightedaveragevalue.go b/typedapi/types/weightedaveragevalue.go index 0a447eb08b..0c3a35ed73 100644 --- a/typedapi/types/weightedaveragevalue.go +++ b/typedapi/types/weightedaveragevalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WeightedAverageValue type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/metric.ts#L448-L458 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/metric.ts#L448-L458 type WeightedAverageValue struct { // Field The field from which to extract the values or weights. Field *string `json:"field,omitempty"` @@ -56,7 +57,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { case "field": if err := dec.Decode(&s.Field); err != nil { - return err + return fmt.Errorf("%s | %w", "Field", err) } case "missing": @@ -66,7 +67,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Missing", err) } f := Float64(value) s.Missing = &f @@ -78,7 +79,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } keyDec := json.NewDecoder(bytes.NewReader(message)) for { @@ -87,7 +88,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { if errors.Is(err, io.EOF) { break } - return err + return fmt.Errorf("%s | %w", "Script", err) } switch t { @@ -96,7 +97,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { o := NewInlineScript() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o @@ -104,7 +105,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { o := NewStoredScriptId() localDec := json.NewDecoder(bytes.NewReader(message)) if err := localDec.Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Script", err) } s.Script = o diff --git a/typedapi/types/weightedavgaggregate.go b/typedapi/types/weightedavgaggregate.go index 9aa73cafac..d242060d68 100644 --- a/typedapi/types/weightedavgaggregate.go +++ b/typedapi/types/weightedavgaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WeightedAvgAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/aggregations/Aggregate.ts#L212-L216 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/aggregations/Aggregate.ts#L212-L216 type WeightedAvgAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. 
A missing value generally means that there was no data to @@ -57,18 +58,18 @@ func (s *WeightedAvgAggregate) UnmarshalJSON(data []byte) error { case "meta": if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "value": if err := dec.Decode(&s.Value); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } case "value_as_string": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ValueAsString", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/weightedtokensquery.go b/typedapi/types/weightedtokensquery.go index 99d85a5fdb..dd83218a63 100644 --- a/typedapi/types/weightedtokensquery.go +++ b/typedapi/types/weightedtokensquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WeightedTokensQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/WeightedTokensQuery.ts#L27-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/WeightedTokensQuery.ts#L27-L32 type WeightedTokensQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -67,7 +68,7 @@ func (s *WeightedTokensQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -78,13 +79,13 @@ func (s *WeightedTokensQuery) UnmarshalJSON(data []byte) error { case "pruning_config": if err := dec.Decode(&s.PruningConfig); err != nil { - return err + return fmt.Errorf("%s | %w", "PruningConfig", err) } case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -98,7 +99,7 @@ func (s *WeightedTokensQuery) UnmarshalJSON(data []byte) error { s.Tokens = make(map[string]float32, 0) } if err := dec.Decode(&s.Tokens); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokens", err) } } diff --git a/typedapi/types/weights.go b/typedapi/types/weights.go index 6195f9f8d7..2e8895689e 100644 --- a/typedapi/types/weights.go +++ b/typedapi/types/weights.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // Weights type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/put_trained_model/types.ts#L108-L110 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/put_trained_model/types.ts#L108-L110 type Weights struct { Weights Float64 `json:"weights"` } @@ -57,7 +58,7 @@ func (s *Weights) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Weights", err) } f := Float64(value) s.Weights = f diff --git a/typedapi/types/whitespaceanalyzer.go b/typedapi/types/whitespaceanalyzer.go index a9268770af..9fee225211 100644 --- a/typedapi/types/whitespaceanalyzer.go +++ b/typedapi/types/whitespaceanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,12 +24,13 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" ) // WhitespaceAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/analyzers.ts#L108-L111 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/analyzers.ts#L108-L111 type WhitespaceAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` @@ -52,12 +53,12 @@ func (s *WhitespaceAnalyzer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/whitespacetokenizer.go b/typedapi/types/whitespacetokenizer.go index bc7903880b..6c4ac25950 100644 --- a/typedapi/types/whitespacetokenizer.go +++ b/typedapi/types/whitespacetokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WhitespaceTokenizer type. 
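The WeightedAverageValue hunk near the start of this section also shows how the generated decoder resolves the script union: the raw "script" object is scanned for a "source" or "id" key and decoded into an InlineScript or a StoredScriptId, with any failure now reported as "Script | …". A small sketch of that behaviour under the same import-path assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func describeScript(doc string) {
	var wav types.WeightedAverageValue
	if err := json.Unmarshal([]byte(doc), &wav); err != nil {
		fmt.Println("decode failed:", err) // e.g. "Script | ..."
		return
	}
	// The union member is picked by key: "source" -> InlineScript, "id" -> StoredScriptId.
	fmt.Printf("script decoded as %T\n", wav.Script)
}

func main() {
	describeScript(`{"field":"grade","script":{"source":"doc['grade'].value"}}`)
	describeScript(`{"script":{"id":"my-stored-script"}}`)
}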
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/tokenizers.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/tokenizers.ts#L115-L118 type WhitespaceTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` @@ -60,7 +61,7 @@ func (s *WhitespaceTokenizer) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MaxTokenLength", err) } s.MaxTokenLength = &value case float64: @@ -70,12 +71,12 @@ func (s *WhitespaceTokenizer) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/wildcardproperty.go b/typedapi/types/wildcardproperty.go index fd91c0d37e..9271bc0bce 100644 --- a/typedapi/types/wildcardproperty.go +++ b/typedapi/types/wildcardproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // WildcardProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/mapping/core.ts#L276-L283 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/mapping/core.ts#L277-L284 type WildcardProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -69,13 +70,13 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(string) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } s.CopyTo = append(s.CopyTo, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { - return err + return fmt.Errorf("%s | %w", "CopyTo", err) } } @@ -86,7 +87,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "DocValues", err) } s.DocValues = &value case bool: @@ -95,7 +96,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { - return err + return fmt.Errorf("%s | %w", "Dynamic", err) } case "fields": @@ -413,7 +414,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreAbove", err) } s.IgnoreAbove = &value case float64: @@ -426,13 +427,13 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { s.Meta = make(map[string]string, 0) } if err := dec.Decode(&s.Meta); err != nil { - return err + return fmt.Errorf("%s | %w", "Meta", err) } case "null_value": var tmp 
json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "NullValue", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -751,7 +752,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Similarity", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -767,7 +768,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Store", err) } s.Store = &value case bool: @@ -776,7 +777,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } } diff --git a/typedapi/types/wildcardquery.go b/typedapi/types/wildcardquery.go index 25f1a7fbac..89b9b58e1f 100644 --- a/typedapi/types/wildcardquery.go +++ b/typedapi/types/wildcardquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WildcardQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/term.ts#L268-L285 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/term.ts#L268-L285 type WildcardQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
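The WildcardProperty hunk above likewise keeps the lenient "copy_to" handling: a bare string and an array of strings both land in the []string CopyTo field, and a malformed value is now reported as "CopyTo | …". A quick sketch, same assumptions as before (errors ignored because the inputs are well formed):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A single string is appended as a one-element slice.
	var single types.WildcardProperty
	_ = json.Unmarshal([]byte(`{"type":"wildcard","copy_to":"all_text"}`), &single)
	fmt.Println(single.CopyTo) // [all_text]

	// An array decodes directly into the slice.
	var many types.WildcardProperty
	_ = json.Unmarshal([]byte(`{"type":"wildcard","copy_to":["title","body"]}`), &many)
	fmt.Println(many.CopyTo) // [title body]
}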
@@ -84,7 +85,7 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -100,7 +101,7 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CaseInsensitive", err) } s.CaseInsensitive = &value case bool: @@ -110,7 +111,7 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -121,13 +122,13 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { case "rewrite": if err := dec.Decode(&s.Rewrite); err != nil { - return err + return fmt.Errorf("%s | %w", "Rewrite", err) } case "value": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Value", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -139,7 +140,7 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { case "wildcard": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Wildcard", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/wktgeobounds.go b/typedapi/types/wktgeobounds.go index f4ddd197e4..6e4fc8f84e 100644 --- a/typedapi/types/wktgeobounds.go +++ b/typedapi/types/wktgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WktGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/Geo.ts#L150-L152 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/Geo.ts#L150-L152 type WktGeoBounds struct { Wkt string `json:"wkt"` } @@ -53,7 +54,7 @@ func (s *WktGeoBounds) UnmarshalJSON(data []byte) error { case "wkt": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Wkt", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/worddelimitergraphtokenfilter.go b/typedapi/types/worddelimitergraphtokenfilter.go index 8472b1d2d5..44009c87c6 100644 --- a/typedapi/types/worddelimitergraphtokenfilter.go +++ b/typedapi/types/worddelimitergraphtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WordDelimiterGraphTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L149-L166 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L149-L166 type WordDelimiterGraphTokenFilter struct { AdjustOffsets *bool `json:"adjust_offsets,omitempty"` CatenateAll *bool `json:"catenate_all,omitempty"` @@ -73,7 +74,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "AdjustOffsets", err) } s.AdjustOffsets = &value case bool: @@ -87,7 +88,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CatenateAll", err) } s.CatenateAll = &value case bool: @@ -101,7 +102,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CatenateNumbers", err) } s.CatenateNumbers = &value case bool: @@ -115,7 +116,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CatenateWords", err) } s.CatenateWords = &value case bool: @@ -129,7 +130,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "GenerateNumberParts", err) } s.GenerateNumberParts = &value case bool: @@ -143,7 +144,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "GenerateWordParts", err) } s.GenerateWordParts = &value case bool: @@ -157,7 +158,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "IgnoreKeywords", err) } s.IgnoreKeywords = &value case bool: @@ -166,18 +167,18 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case "preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "protected_words": if err := dec.Decode(&s.ProtectedWords); err != nil { - return err + return fmt.Errorf("%s | %w", "ProtectedWords", err) } case "protected_words_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ProtectedWordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -193,7 +194,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitOnCaseChange", err) } s.SplitOnCaseChange = &value case bool: @@ -207,7 +208,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitOnNumerics", err) } s.SplitOnNumerics = &value case bool: @@ -221,7 +222,7 @@ func (s *WordDelimiterGraphTokenFilter) 
UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StemEnglishPossessive", err) } s.StemEnglishPossessive = &value case bool: @@ -230,18 +231,18 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "type_table": if err := dec.Decode(&s.TypeTable); err != nil { - return err + return fmt.Errorf("%s | %w", "TypeTable", err) } case "type_table_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TypeTablePath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -252,7 +253,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/worddelimitertokenfilter.go b/typedapi/types/worddelimitertokenfilter.go index 5b75040bd3..a9f5b7a9be 100644 --- a/typedapi/types/worddelimitertokenfilter.go +++ b/typedapi/types/worddelimitertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WordDelimiterTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/analysis/token_filters.ts#L132-L147 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/analysis/token_filters.ts#L132-L147 type WordDelimiterTokenFilter struct { CatenateAll *bool `json:"catenate_all,omitempty"` CatenateNumbers *bool `json:"catenate_numbers,omitempty"` @@ -71,7 +72,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CatenateAll", err) } s.CatenateAll = &value case bool: @@ -85,7 +86,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CatenateNumbers", err) } s.CatenateNumbers = &value case bool: @@ -99,7 +100,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "CatenateWords", err) } s.CatenateWords = &value case bool: @@ -113,7 +114,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "GenerateNumberParts", err) } s.GenerateNumberParts = &value case bool: @@ -127,7 +128,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "GenerateWordParts", err) } s.GenerateWordParts = &value case bool: @@ -136,18 +137,18 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case 
"preserve_original": if err := dec.Decode(&s.PreserveOriginal); err != nil { - return err + return fmt.Errorf("%s | %w", "PreserveOriginal", err) } case "protected_words": if err := dec.Decode(&s.ProtectedWords); err != nil { - return err + return fmt.Errorf("%s | %w", "ProtectedWords", err) } case "protected_words_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ProtectedWordsPath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -163,7 +164,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitOnCaseChange", err) } s.SplitOnCaseChange = &value case bool: @@ -177,7 +178,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "SplitOnNumerics", err) } s.SplitOnNumerics = &value case bool: @@ -191,7 +192,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "StemEnglishPossessive", err) } s.StemEnglishPossessive = &value case bool: @@ -200,18 +201,18 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case "type": if err := dec.Decode(&s.Type); err != nil { - return err + return fmt.Errorf("%s | %w", "Type", err) } case "type_table": if err := dec.Decode(&s.TypeTable); err != nil { - return err + return fmt.Errorf("%s | %w", "TypeTable", err) } case "type_table_path": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "TypeTablePath", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -222,7 +223,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } } diff --git a/typedapi/types/wrapperquery.go b/typedapi/types/wrapperquery.go index ac211c137c..4f87335c31 100644 --- a/typedapi/types/wrapperquery.go +++ b/typedapi/types/wrapperquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // WrapperQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_types/query_dsl/abstractions.ts#L481-L487 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_types/query_dsl/abstractions.ts#L481-L487 type WrapperQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -66,7 +67,7 @@ func (s *WrapperQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseFloat(v, 32) if err != nil { - return err + return fmt.Errorf("%s | %w", "Boost", err) } f := float32(value) s.Boost = &f @@ -78,7 +79,7 @@ func (s *WrapperQuery) UnmarshalJSON(data []byte) error { case "query": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -90,7 +91,7 @@ func (s *WrapperQuery) UnmarshalJSON(data []byte) error { case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "QueryName_", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) diff --git a/typedapi/types/writeoperation.go b/typedapi/types/writeoperation.go index a7e5fdb924..6462af6fb2 100644 --- a/typedapi/types/writeoperation.go +++ b/typedapi/types/writeoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,6 +24,7 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" @@ -32,7 +33,7 @@ import ( // WriteOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/_global/bulk/types.ts#L109-L128 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/_global/bulk/types.ts#L109-L128 type WriteOperation struct { // DynamicTemplates A map from the full name of fields to the name of dynamic templates. // Defaults to an empty map. 
@@ -81,12 +82,12 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { s.DynamicTemplates = make(map[string]string, 0) } if err := dec.Decode(&s.DynamicTemplates); err != nil { - return err + return fmt.Errorf("%s | %w", "DynamicTemplates", err) } case "_id": if err := dec.Decode(&s.Id_); err != nil { - return err + return fmt.Errorf("%s | %w", "Id_", err) } case "if_primary_term": @@ -96,7 +97,7 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "IfPrimaryTerm", err) } s.IfPrimaryTerm = &value case float64: @@ -106,18 +107,18 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { case "if_seq_no": if err := dec.Decode(&s.IfSeqNo); err != nil { - return err + return fmt.Errorf("%s | %w", "IfSeqNo", err) } case "_index": if err := dec.Decode(&s.Index_); err != nil { - return err + return fmt.Errorf("%s | %w", "Index_", err) } case "pipeline": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Pipeline", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -133,7 +134,7 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "RequireAlias", err) } s.RequireAlias = &value case bool: @@ -142,17 +143,17 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { case "routing": if err := dec.Decode(&s.Routing); err != nil { - return err + return fmt.Errorf("%s | %w", "Routing", err) } case "version": if err := dec.Decode(&s.Version); err != nil { - return err + return fmt.Errorf("%s | %w", "Version", err) } case "version_type": if err := dec.Decode(&s.VersionType); err != nil { - return err + return fmt.Errorf("%s | %w", "VersionType", err) } } diff --git a/typedapi/types/xpackdatafeed.go b/typedapi/types/xpackdatafeed.go index 0733b8e814..c7dcffdd5a 100644 --- a/typedapi/types/xpackdatafeed.go +++ b/typedapi/types/xpackdatafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // XpackDatafeed type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L77-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L77-L79 type XpackDatafeed struct { Count int64 `json:"count"` } @@ -57,7 +58,7 @@ func (s *XpackDatafeed) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = value case float64: diff --git a/typedapi/types/xpackfeature.go b/typedapi/types/xpackfeature.go index fb9bbcc0ca..d30edcd72b 100644 --- a/typedapi/types/xpackfeature.go +++ b/typedapi/types/xpackfeature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
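The XpackDatafeed hunk is the smallest instance of the numeric variant of the same pattern: "count" may arrive as a JSON number or a quoted string, and only an unparsable string is an error, now labelled "Count | …". For illustration, under the same import-path assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var a, b types.XpackDatafeed

	_ = json.Unmarshal([]byte(`{"count":7}`), &a)   // plain number
	_ = json.Unmarshal([]byte(`{"count":"7"}`), &b) // quoted number, parsed with strconv.ParseInt
	fmt.Println(a.Count, b.Count)                   // 7 7

	var c types.XpackDatafeed
	err := json.Unmarshal([]byte(`{"count":"seven"}`), &c)
	fmt.Println(err) // Count | strconv.ParseInt: parsing "seven": invalid syntax
}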
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // XpackFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/info/types.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/info/types.ts#L77-L82 type XpackFeature struct { Available bool `json:"available"` Description *string `json:"description,omitempty"` @@ -60,7 +61,7 @@ func (s *XpackFeature) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -70,7 +71,7 @@ func (s *XpackFeature) UnmarshalJSON(data []byte) error { case "description": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "Description", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -86,7 +87,7 @@ func (s *XpackFeature) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -95,7 +96,7 @@ func (s *XpackFeature) UnmarshalJSON(data []byte) error { case "native_code_info": if err := dec.Decode(&s.NativeCodeInfo); err != nil { - return err + return fmt.Errorf("%s | %w", "NativeCodeInfo", err) } } diff --git a/typedapi/types/xpackfeatures.go b/typedapi/types/xpackfeatures.go index 4729b6a643..f68a222830 100644 --- a/typedapi/types/xpackfeatures.go +++ b/typedapi/types/xpackfeatures.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types // XpackFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/info/types.ts#L42-L75 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/info/types.ts#L42-L75 type XpackFeatures struct { AggregateMetric XpackFeature `json:"aggregate_metric"` Analytics XpackFeature `json:"analytics"` diff --git a/typedapi/types/xpackquery.go b/typedapi/types/xpackquery.go index 9e1d6ab673..c0e9e9fe0b 100644 --- a/typedapi/types/xpackquery.go +++ b/typedapi/types/xpackquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // XpackQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L259-L264 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L259-L264 type XpackQuery struct { Count *int `json:"count,omitempty"` Failed *int `json:"failed,omitempty"` @@ -61,7 +62,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Count", err) } s.Count = &value case float64: @@ -77,7 +78,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Failed", err) } s.Failed = &value case float64: @@ -93,7 +94,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Paging", err) } s.Paging = &value case float64: @@ -109,7 +110,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Total", err) } s.Total = &value case float64: diff --git a/typedapi/types/xpackrealm.go b/typedapi/types/xpackrealm.go index 261232c733..e3cfb440e5 100644 --- a/typedapi/types/xpackrealm.go +++ b/typedapi/types/xpackrealm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // XpackRealm type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L417-L426 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L417-L426 type XpackRealm struct { Available bool `json:"available"` Cache []RealmCache `json:"cache,omitempty"` @@ -66,7 +67,7 @@ func (s *XpackRealm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -75,7 +76,7 @@ func (s *XpackRealm) UnmarshalJSON(data []byte) error { case "cache": if err := dec.Decode(&s.Cache); err != nil { - return err + return fmt.Errorf("%s | %w", "Cache", err) } case "enabled": @@ -85,7 +86,7 @@ func (s *XpackRealm) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -94,37 +95,37 @@ func (s *XpackRealm) UnmarshalJSON(data []byte) error { case "has_authorization_realms": if err := dec.Decode(&s.HasAuthorizationRealms); err != nil { - return err + return fmt.Errorf("%s | %w", "HasAuthorizationRealms", err) } case "has_default_username_pattern": if err := dec.Decode(&s.HasDefaultUsernamePattern); err != nil { - return err + return fmt.Errorf("%s | %w", "HasDefaultUsernamePattern", err) } case "has_truststore": if err := dec.Decode(&s.HasTruststore); err != nil { - return err + return fmt.Errorf("%s | %w", "HasTruststore", err) } case "is_authentication_delegated": if err := dec.Decode(&s.IsAuthenticationDelegated); err != nil { - return err + return fmt.Errorf("%s | %w", "IsAuthenticationDelegated", err) } case "name": if err := dec.Decode(&s.Name); err != nil { - return err + return fmt.Errorf("%s | %w", "Name", err) } case "order": if err := dec.Decode(&s.Order); err != nil { - return err + return fmt.Errorf("%s | %w", "Order", err) } case "size": if err := dec.Decode(&s.Size); err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } } diff --git a/typedapi/types/xpackrolemapping.go b/typedapi/types/xpackrolemapping.go index 8a6cc519c9..fe0603bc30 100644 --- a/typedapi/types/xpackrolemapping.go +++ b/typedapi/types/xpackrolemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // XpackRoleMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L270-L273 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L270-L273 type XpackRoleMapping struct { Enabled int `json:"enabled"` Size int `json:"size"` @@ -59,7 +60,7 @@ func (s *XpackRoleMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case float64: @@ -75,7 +76,7 @@ func (s *XpackRoleMapping) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = value case float64: diff --git a/typedapi/types/xpackruntimefieldtypes.go b/typedapi/types/xpackruntimefieldtypes.go index fd4a48e874..afcd5a3227 100644 --- a/typedapi/types/xpackruntimefieldtypes.go +++ b/typedapi/types/xpackruntimefieldtypes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // XpackRuntimeFieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/types.ts#L275-L277 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/types.ts#L275-L277 type XpackRuntimeFieldTypes struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -59,7 +60,7 @@ func (s *XpackRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Available", err) } s.Available = value case bool: @@ -73,7 +74,7 @@ func (s *XpackRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Enabled", err) } s.Enabled = value case bool: @@ -82,7 +83,7 @@ func (s *XpackRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "field_types": if err := dec.Decode(&s.FieldTypes); err != nil { - return err + return fmt.Errorf("%s | %w", "FieldTypes", err) } } diff --git a/typedapi/types/zeroshotclassificationinferenceoptions.go b/typedapi/types/zeroshotclassificationinferenceoptions.go index 35890dd5d3..5649809b5b 100644 --- a/typedapi/types/zeroshotclassificationinferenceoptions.go +++ b/typedapi/types/zeroshotclassificationinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ZeroShotClassificationInferenceOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L201-L222 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L201-L222 type ZeroShotClassificationInferenceOptions struct { // ClassificationLabels The zero shot classification labels indicating entailment, neutral, and // contradiction @@ -66,13 +67,13 @@ func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) erro case "classification_labels": if err := dec.Decode(&s.ClassificationLabels); err != nil { - return err + return fmt.Errorf("%s | %w", "ClassificationLabels", err) } case "hypothesis_template": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "HypothesisTemplate", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -83,7 +84,7 @@ func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) erro case "labels": if err := dec.Decode(&s.Labels); err != nil { - return err + return fmt.Errorf("%s | %w", "Labels", err) } case "multi_label": @@ -93,7 +94,7 @@ func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) erro case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MultiLabel", err) } s.MultiLabel = &value case bool: @@ -103,7 +104,7 @@ func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) erro case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -114,7 +115,7 @@ func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) erro case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/types/zeroshotclassificationinferenceupdateoptions.go b/typedapi/types/zeroshotclassificationinferenceupdateoptions.go index 387e280b17..de0d814eff 100644 --- a/typedapi/types/zeroshotclassificationinferenceupdateoptions.go +++ b/typedapi/types/zeroshotclassificationinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package types @@ -24,13 +24,14 @@ import ( "bytes" "encoding/json" "errors" + "fmt" "io" "strconv" ) // ZeroShotClassificationInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/ml/_types/inference.ts#L374-L383 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/ml/_types/inference.ts#L374-L383 type ZeroShotClassificationInferenceUpdateOptions struct { // Labels The labels to predict. 
Labels []string `json:"labels"` @@ -61,7 +62,7 @@ func (s *ZeroShotClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte case "labels": if err := dec.Decode(&s.Labels); err != nil { - return err + return fmt.Errorf("%s | %w", "Labels", err) } case "multi_label": @@ -71,7 +72,7 @@ func (s *ZeroShotClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte case string: value, err := strconv.ParseBool(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "MultiLabel", err) } s.MultiLabel = &value case bool: @@ -81,7 +82,7 @@ func (s *ZeroShotClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte case "results_field": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ResultsField", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -92,7 +93,7 @@ func (s *ZeroShotClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte case "tokenization": if err := dec.Decode(&s.Tokenization); err != nil { - return err + return fmt.Errorf("%s | %w", "Tokenization", err) } } diff --git a/typedapi/watcher/ackwatch/ack_watch.go b/typedapi/watcher/ackwatch/ack_watch.go index e907b431cf..8a795b3804 100644 --- a/typedapi/watcher/ackwatch/ack_watch.go +++ b/typedapi/watcher/ackwatch/ack_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Acknowledges a watch, manually throttling the execution of the watch's // actions. diff --git a/typedapi/watcher/ackwatch/response.go b/typedapi/watcher/ackwatch/response.go index 34da00d038..2daf3fcd93 100644 --- a/typedapi/watcher/ackwatch/response.go +++ b/typedapi/watcher/ackwatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package ackwatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package ackwatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/ack_watch/WatcherAckWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/ack_watch/WatcherAckWatchResponse.ts#L22-L24 type Response struct { Status types.WatchStatus `json:"status"` } diff --git a/typedapi/watcher/activatewatch/activate_watch.go b/typedapi/watcher/activatewatch/activate_watch.go index 5c6de41be3..5ef8925892 100644 --- a/typedapi/watcher/activatewatch/activate_watch.go +++ b/typedapi/watcher/activatewatch/activate_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Activates a currently inactive watch. 
package activatewatch diff --git a/typedapi/watcher/activatewatch/response.go b/typedapi/watcher/activatewatch/response.go index 2b7ee30c4e..2af0d5745c 100644 --- a/typedapi/watcher/activatewatch/response.go +++ b/typedapi/watcher/activatewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package activatewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package activatewatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/activate_watch/WatcherActivateWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/activate_watch/WatcherActivateWatchResponse.ts#L22-L24 type Response struct { Status types.ActivationStatus `json:"status"` } diff --git a/typedapi/watcher/deactivatewatch/deactivate_watch.go b/typedapi/watcher/deactivatewatch/deactivate_watch.go index db85b56622..34bd841acf 100644 --- a/typedapi/watcher/deactivatewatch/deactivate_watch.go +++ b/typedapi/watcher/deactivatewatch/deactivate_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Deactivates a currently active watch. package deactivatewatch diff --git a/typedapi/watcher/deactivatewatch/response.go b/typedapi/watcher/deactivatewatch/response.go index bb78ea1cf4..fa2a4bba02 100644 --- a/typedapi/watcher/deactivatewatch/response.go +++ b/typedapi/watcher/deactivatewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deactivatewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deactivatewatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/deactivate_watch/DeactivateWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/deactivate_watch/DeactivateWatchResponse.ts#L22-L24 type Response struct { Status types.ActivationStatus `json:"status"` } diff --git a/typedapi/watcher/deletewatch/delete_watch.go b/typedapi/watcher/deletewatch/delete_watch.go index a92dcf7b97..cd1304b8a5 100644 --- a/typedapi/watcher/deletewatch/delete_watch.go +++ b/typedapi/watcher/deletewatch/delete_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Removes a watch from Watcher. 
package deletewatch diff --git a/typedapi/watcher/deletewatch/response.go b/typedapi/watcher/deletewatch/response.go index 63bd174364..677ee20453 100644 --- a/typedapi/watcher/deletewatch/response.go +++ b/typedapi/watcher/deletewatch/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package deletewatch // Response holds the response body struct for the package deletewatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/delete_watch/DeleteWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/delete_watch/DeleteWatchResponse.ts#L22-L24 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/watcher/executewatch/execute_watch.go b/typedapi/watcher/executewatch/execute_watch.go index 7d976737d9..09a1858e5d 100644 --- a/typedapi/watcher/executewatch/execute_watch.go +++ b/typedapi/watcher/executewatch/execute_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Forces the execution of a stored watch. package executewatch diff --git a/typedapi/watcher/executewatch/request.go b/typedapi/watcher/executewatch/request.go index 2803718dcf..d627a5aba4 100644 --- a/typedapi/watcher/executewatch/request.go +++ b/typedapi/watcher/executewatch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package executewatch @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package executewatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/execute_watch/WatcherExecuteWatchRequest.ts#L28-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/execute_watch/WatcherExecuteWatchRequest.ts#L28-L79 type Request struct { // ActionModes Determines how to handle the watch actions as part of the watch execution. diff --git a/typedapi/watcher/executewatch/response.go b/typedapi/watcher/executewatch/response.go index 527b902136..eadb688ecc 100644 --- a/typedapi/watcher/executewatch/response.go +++ b/typedapi/watcher/executewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package executewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package executewatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/execute_watch/WatcherExecuteWatchResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/execute_watch/WatcherExecuteWatchResponse.ts#L23-L25 type Response struct { Id_ string `json:"_id"` WatchRecord types.WatchRecord `json:"watch_record"` diff --git a/typedapi/watcher/getsettings/get_settings.go b/typedapi/watcher/getsettings/get_settings.go index 53545c20a9..c58439b85d 100644 --- a/typedapi/watcher/getsettings/get_settings.go +++ b/typedapi/watcher/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieve settings for the watcher system index package getsettings diff --git a/typedapi/watcher/getwatch/get_watch.go b/typedapi/watcher/getwatch/get_watch.go index cc2189f87f..0331cd3789 100644 --- a/typedapi/watcher/getwatch/get_watch.go +++ b/typedapi/watcher/getwatch/get_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves a watch by its ID. package getwatch diff --git a/typedapi/watcher/getwatch/response.go b/typedapi/watcher/getwatch/response.go index 2224cb88de..7a648fb54c 100644 --- a/typedapi/watcher/getwatch/response.go +++ b/typedapi/watcher/getwatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package getwatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getwatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/get_watch/GetWatchResponse.ts#L24-L34 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/get_watch/GetWatchResponse.ts#L24-L34 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/watcher/putwatch/put_watch.go b/typedapi/watcher/putwatch/put_watch.go index 9bb6e78078..2dc693756c 100644 --- a/typedapi/watcher/putwatch/put_watch.go +++ b/typedapi/watcher/putwatch/put_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Creates a new watch, or updates an existing one. package putwatch diff --git a/typedapi/watcher/putwatch/request.go b/typedapi/watcher/putwatch/request.go index f9b70e0707..e74ece3a8d 100644 --- a/typedapi/watcher/putwatch/request.go +++ b/typedapi/watcher/putwatch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putwatch @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putwatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/put_watch/WatcherPutWatchRequest.ts#L30-L53 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/put_watch/WatcherPutWatchRequest.ts#L30-L53 type Request struct { Actions map[string]types.WatcherAction `json:"actions,omitempty"` Condition *types.WatcherCondition `json:"condition,omitempty"` @@ -83,28 +83,28 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Actions = make(map[string]types.WatcherAction, 0) } if err := dec.Decode(&s.Actions); err != nil { - return err + return fmt.Errorf("%s | %w", "Actions", err) } case "condition": if err := dec.Decode(&s.Condition); err != nil { - return err + return fmt.Errorf("%s | %w", "Condition", err) } case "input": if err := dec.Decode(&s.Input); err != nil { - return err + return fmt.Errorf("%s | %w", "Input", err) } case "metadata": if err := dec.Decode(&s.Metadata); err != nil { - return err + return fmt.Errorf("%s | %w", "Metadata", err) } case "throttle_period": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return err + return fmt.Errorf("%s | %w", "ThrottlePeriod", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) @@ -115,12 +115,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "transform": if err := dec.Decode(&s.Transform); err != nil { - return err + return fmt.Errorf("%s | %w", "Transform", err) } case "trigger": if err := dec.Decode(&s.Trigger); err != nil { - return err + return fmt.Errorf("%s | %w", "Trigger", err) } } diff --git a/typedapi/watcher/putwatch/response.go b/typedapi/watcher/putwatch/response.go index e418520de2..12b38fd312 100644 --- a/typedapi/watcher/putwatch/response.go +++ b/typedapi/watcher/putwatch/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package putwatch // Response holds the response body struct for the package putwatch // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/put_watch/WatcherPutWatchResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/put_watch/WatcherPutWatchResponse.ts#L23-L31 type Response struct { Created bool `json:"created"` Id_ string `json:"_id"` diff --git a/typedapi/watcher/querywatches/query_watches.go b/typedapi/watcher/querywatches/query_watches.go index 9861836a75..663ba6d713 100644 --- a/typedapi/watcher/querywatches/query_watches.go +++ b/typedapi/watcher/querywatches/query_watches.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves stored watches. package querywatches diff --git a/typedapi/watcher/querywatches/request.go b/typedapi/watcher/querywatches/request.go index 6b253eb2db..6a1e752acb 100644 --- a/typedapi/watcher/querywatches/request.go +++ b/typedapi/watcher/querywatches/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package querywatches @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package querywatches // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/query_watches/WatcherQueryWatchesRequest.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/query_watches/WatcherQueryWatchesRequest.ts#L25-L48 type Request struct { // From The offset from the first result to fetch. Needs to be non-negative. 
@@ -88,7 +88,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "From", err) } s.From = &value case float64: @@ -98,12 +98,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "query": if err := dec.Decode(&s.Query); err != nil { - return err + return fmt.Errorf("%s | %w", "Query", err) } case "search_after": if err := dec.Decode(&s.SearchAfter); err != nil { - return err + return fmt.Errorf("%s | %w", "SearchAfter", err) } case "size": @@ -114,7 +114,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case string: value, err := strconv.Atoi(v) if err != nil { - return err + return fmt.Errorf("%s | %w", "Size", err) } s.Size = &value case float64: @@ -128,13 +128,13 @@ func (s *Request) UnmarshalJSON(data []byte) error { if !bytes.HasPrefix(rawMsg, []byte("[")) { o := new(types.SortCombinations) if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } s.Sort = append(s.Sort, *o) } else { if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { - return err + return fmt.Errorf("%s | %w", "Sort", err) } } diff --git a/typedapi/watcher/querywatches/response.go b/typedapi/watcher/querywatches/response.go index 5e40febd55..eb4684526c 100644 --- a/typedapi/watcher/querywatches/response.go +++ b/typedapi/watcher/querywatches/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package querywatches @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package querywatches // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/query_watches/WatcherQueryWatchesResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/query_watches/WatcherQueryWatchesResponse.ts#L23-L28 type Response struct { Count int `json:"count"` Watches []types.QueryWatch `json:"watches"` diff --git a/typedapi/watcher/start/response.go b/typedapi/watcher/start/response.go index 7b34197d24..bb34de703f 100644 --- a/typedapi/watcher/start/response.go +++ b/typedapi/watcher/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/start/WatcherStartResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/start/WatcherStartResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/watcher/start/start.go b/typedapi/watcher/start/start.go index 90517d73e7..f94e44675a 100644 --- a/typedapi/watcher/start/start.go +++ b/typedapi/watcher/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Starts Watcher if it is not already running. package start diff --git a/typedapi/watcher/stats/response.go b/typedapi/watcher/stats/response.go index 256a9ae6ce..d5955cb5c9 100644 --- a/typedapi/watcher/stats/response.go +++ b/typedapi/watcher/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stats/WatcherStatsResponse.ts#L24-L32 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stats/WatcherStatsResponse.ts#L24-L32 type Response struct { ClusterName string `json:"cluster_name"` ManuallyStopped bool `json:"manually_stopped"` diff --git a/typedapi/watcher/stats/stats.go b/typedapi/watcher/stats/stats.go index ea43f89738..2fc5dafb91 100644 --- a/typedapi/watcher/stats/stats.go +++ b/typedapi/watcher/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves the current Watcher metrics. package stats diff --git a/typedapi/watcher/stop/response.go b/typedapi/watcher/stop/response.go index 148ff6d03a..8a2962619f 100644 --- a/typedapi/watcher/stop/response.go +++ b/typedapi/watcher/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/watcher/stop/WatcherStopResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/watcher/stop/WatcherStopResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/watcher/stop/stop.go b/typedapi/watcher/stop/stop.go index 00367c3fcb..db221bee9d 100644 --- a/typedapi/watcher/stop/stop.go +++ b/typedapi/watcher/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Stops Watcher if it is running. package stop diff --git a/typedapi/watcher/updatesettings/update_settings.go b/typedapi/watcher/updatesettings/update_settings.go index e7268c5224..3e8e42c440 100644 --- a/typedapi/watcher/updatesettings/update_settings.go +++ b/typedapi/watcher/updatesettings/update_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Update settings for the watcher system index package updatesettings diff --git a/typedapi/xpack/info/info.go b/typedapi/xpack/info/info.go index cb549faf6b..32c0444582 100644 --- a/typedapi/xpack/info/info.go +++ b/typedapi/xpack/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves information about the installed X-Pack features. package info diff --git a/typedapi/xpack/info/response.go b/typedapi/xpack/info/response.go index 79622ab4c0..471f7d00dc 100644 --- a/typedapi/xpack/info/response.go +++ b/typedapi/xpack/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/info/XPackInfoResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/info/XPackInfoResponse.ts#L22-L29 type Response struct { Build types.BuildInformation `json:"build"` Features types.XpackFeatures `json:"features"` diff --git a/typedapi/xpack/usage/response.go b/typedapi/xpack/usage/response.go index 4c32969216..e929b3a268 100644 --- a/typedapi/xpack/usage/response.go +++ b/typedapi/xpack/usage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 package usage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package usage // -// https://github.com/elastic/elasticsearch-specification/blob/6e0fb6b929f337b62bf0676bdf503e061121fad2/specification/xpack/usage/XPackUsageResponse.ts#L43-L79 +// https://github.com/elastic/elasticsearch-specification/blob/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339/specification/xpack/usage/XPackUsageResponse.ts#L43-L79 type Response struct { AggregateMetric types.Base `json:"aggregate_metric"` Analytics types.Analytics `json:"analytics"` diff --git a/typedapi/xpack/usage/usage.go b/typedapi/xpack/usage/usage.go index de9c910e0d..fe4636ac64 100644 --- a/typedapi/xpack/usage/usage.go +++ b/typedapi/xpack/usage/usage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/6e0fb6b929f337b62bf0676bdf503e061121fad2 +// https://github.com/elastic/elasticsearch-specification/tree/accc26662ab4c58f4f6fb0fc1d9fc5249d0de339 // Retrieves usage information about the installed X-Pack features. package usage
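Aside from the specification-commit bump in the generated header comments (6e0fb6b9… → accc2666…), the substantive change in these hunks is that every generated UnmarshalJSON now wraps per-field decode failures with the name of the field being decoded instead of returning the bare error. The snippet below is a minimal, self-contained sketch of that pattern and of how a caller can still reach the underlying encoding/json error through the %w wrap; the watchRequest type and its From field are illustrative stand-ins, not types from this client.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// watchRequest mimics a generated request type with a custom UnmarshalJSON.
type watchRequest struct {
	From *int `json:"from,omitempty"`
}

// UnmarshalJSON mirrors the generated pattern: on a per-field decode error,
// return fmt.Errorf("%s | %w", "<FieldName>", err) rather than the bare error.
func (r *watchRequest) UnmarshalJSON(data []byte) error {
	var raw struct {
		From json.RawMessage `json:"from"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if len(raw.From) > 0 {
		var v int
		if err := json.Unmarshal(raw.From, &v); err != nil {
			return fmt.Errorf("%s | %w", "From", err)
		}
		r.From = &v
	}
	return nil
}

func main() {
	var req watchRequest
	err := json.Unmarshal([]byte(`{"from":"not-a-number"}`), &req)

	// The field name is now part of the message:
	// From | json: cannot unmarshal string into Go value of type int
	fmt.Println(err)

	// Because the error is wrapped with %w, errors.As still reaches the
	// original *json.UnmarshalTypeError, so existing error handling keeps working.
	var typeErr *json.UnmarshalTypeError
	fmt.Println(errors.As(err, &typeErr)) // true
}

The same "Field | wrapped error" shape appears in the putwatch and querywatches request hunks above for Actions, Condition, Input, Metadata, ThrottlePeriod, From, Size, Sort, and the other decoded fields, so the change adds context to error messages without altering the wrapped error types.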