1
0
Mirror of https://github.com/rancher/types.git, synced 2025-08-30 01:22:42 +00:00

generate code

This commit is contained in:
Aiwantaozi 2018-12-21 15:18:19 +08:00 committed by Craig Jellick
parent 8bcbea18a1
commit 771d9ad9fa
8 changed files with 264 additions and 97 deletions

View File

@ -1213,7 +1213,8 @@ func (in *ClusterLoggingList) DeepCopyObject() runtime.Object {
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE(review): this span is a rendered diff hunk (header "-1213,7 +1213,8"),
// not compilable source — the LoggingCommonSpec call below is the removed
// pre-change statement, and the LoggingTargets and LoggingCommonField calls
// after it are the two replacement statements introduced by this commit.
func (in *ClusterLoggingSpec) DeepCopyInto(out *ClusterLoggingSpec) {
*out = *in
in.LoggingCommonSpec.DeepCopyInto(&out.LoggingCommonSpec)
in.LoggingTargets.DeepCopyInto(&out.LoggingTargets)
in.LoggingCommonField.DeepCopyInto(&out.LoggingCommonField)
return
}
@ -1622,6 +1623,23 @@ func (in *ClusterStatus) DeepCopy() *ClusterStatus {
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClusterTestInput) DeepCopyInto(out *ClusterTestInput) {
// Shallow-copy all value fields first, then deep-copy the embedded
// LoggingTargets so its pointer-typed target configs are not shared
// between in and out.
*out = *in
in.LoggingTargets.DeepCopyInto(&out.LoggingTargets)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterTestInput.
// A nil receiver yields nil; otherwise the result shares no mutable state
// with the receiver.
func (in *ClusterTestInput) DeepCopy() *ClusterTestInput {
	if in == nil {
		return nil
	}
	clone := &ClusterTestInput{}
	in.DeepCopyInto(clone)
	return clone
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *CommonGroupField) DeepCopyInto(out *CommonGroupField) {
*out = *in
@ -1856,6 +1874,22 @@ func (in *CustomConfig) DeepCopy() *CustomConfig {
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *CustomTargetConfig) DeepCopyInto(out *CustomTargetConfig) {
// A single value assignment is the whole copy: the generator emits this
// form for structs without reference-typed fields (the client-side
// CustomTargetConfig in this commit holds only a Content string; the API
// type is presumably the same shape — confirm against the apis package).
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CustomTargetConfig.
// Returns nil when called on a nil receiver.
func (in *CustomTargetConfig) DeepCopy() *CustomTargetConfig {
	if in == nil {
		return nil
	}
	dup := &CustomTargetConfig{}
	in.DeepCopyInto(dup)
	return dup
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DiskVsphereOpts) DeepCopyInto(out *DiskVsphereOpts) {
*out = *in
@ -3572,7 +3606,7 @@ func (in *LocalConfig) DeepCopyObject() runtime.Object {
}
// NOTE(review): the lines below are a rendered unified diff of the
// LoggingCommonSpec -> LoggingCommonField rename, with removed and added
// lines interleaved and the +/- markers stripped; they are not compilable
// as-is. Per the hunk header ("-3581,40 +3615,15") the old 40-line body
// shrinks to 15 lines: the per-target pointer-copy blocks
// (Elasticsearch/Splunk/Kafka/Syslog/FluentForwarder) are removed from this
// type and reappear in the new LoggingTargets.DeepCopyInto added by this
// same commit.
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LoggingCommonSpec) DeepCopyInto(out *LoggingCommonSpec) {
func (in *LoggingCommonField) DeepCopyInto(out *LoggingCommonField) {
*out = *in
if in.OutputTags != nil {
in, out := &in.OutputTags, &out.OutputTags
@ -3581,40 +3615,15 @@ func (in *LoggingCommonSpec) DeepCopyInto(out *LoggingCommonSpec) {
(*out)[key] = val
}
}
if in.ElasticsearchConfig != nil {
in, out := &in.ElasticsearchConfig, &out.ElasticsearchConfig
*out = new(ElasticsearchConfig)
**out = **in
}
if in.SplunkConfig != nil {
in, out := &in.SplunkConfig, &out.SplunkConfig
*out = new(SplunkConfig)
**out = **in
}
if in.KafkaConfig != nil {
in, out := &in.KafkaConfig, &out.KafkaConfig
*out = new(KafkaConfig)
(*in).DeepCopyInto(*out)
}
if in.SyslogConfig != nil {
in, out := &in.SyslogConfig, &out.SyslogConfig
*out = new(SyslogConfig)
**out = **in
}
if in.FluentForwarderConfig != nil {
in, out := &in.FluentForwarderConfig, &out.FluentForwarderConfig
*out = new(FluentForwarderConfig)
(*in).DeepCopyInto(*out)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingCommonSpec.
func (in *LoggingCommonSpec) DeepCopy() *LoggingCommonSpec {
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingCommonField.
func (in *LoggingCommonField) DeepCopy() *LoggingCommonField {
if in == nil {
return nil
}
out := new(LoggingCommonSpec)
out := new(LoggingCommonField)
in.DeepCopyInto(out)
return out
}
@ -3651,6 +3660,52 @@ func (in *LoggingSystemImages) DeepCopy() *LoggingSystemImages {
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LoggingTargets) DeepCopyInto(out *LoggingTargets) {
*out = *in
// Every target config is an optional pointer; each one that is set gets a
// freshly allocated struct so the copy never aliases the receiver's
// targets. The in/out shadowing inside each block is the standard
// deepcopy-gen idiom.
if in.ElasticsearchConfig != nil {
in, out := &in.ElasticsearchConfig, &out.ElasticsearchConfig
*out = new(ElasticsearchConfig)
**out = **in
}
if in.SplunkConfig != nil {
in, out := &in.SplunkConfig, &out.SplunkConfig
*out = new(SplunkConfig)
**out = **in
}
// KafkaConfig and FluentForwarderConfig delegate to their own DeepCopyInto
// rather than a plain value assignment — the generator emits **out = **in
// only for structs it determined need no recursive copying (presumably the
// delegating types contain reference-typed fields; their definitions are
// not visible in this chunk).
if in.KafkaConfig != nil {
in, out := &in.KafkaConfig, &out.KafkaConfig
*out = new(KafkaConfig)
(*in).DeepCopyInto(*out)
}
if in.SyslogConfig != nil {
in, out := &in.SyslogConfig, &out.SyslogConfig
*out = new(SyslogConfig)
**out = **in
}
if in.FluentForwarderConfig != nil {
in, out := &in.FluentForwarderConfig, &out.FluentForwarderConfig
*out = new(FluentForwarderConfig)
(*in).DeepCopyInto(*out)
}
if in.CustomTargetConfig != nil {
in, out := &in.CustomTargetConfig, &out.CustomTargetConfig
*out = new(CustomTargetConfig)
**out = **in
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingTargets.
// It returns nil for a nil receiver; otherwise the result is a fully
// independent copy produced via DeepCopyInto.
func (in *LoggingTargets) DeepCopy() *LoggingTargets {
	if in == nil {
		return nil
	}
	copied := &LoggingTargets{}
	in.DeepCopyInto(copied)
	return copied
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in MapStringInterface) DeepCopyInto(out *MapStringInterface) {
{
@ -5926,7 +5981,8 @@ func (in *ProjectLoggingList) DeepCopyObject() runtime.Object {
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
// NOTE(review): rendered diff hunk (header "-5926,7 +5981,8"), not
// compilable source — the LoggingCommonSpec line is the removed statement
// and the two lines after it are the commit's replacements, mirroring the
// identical change to ClusterLoggingSpec.DeepCopyInto above.
func (in *ProjectLoggingSpec) DeepCopyInto(out *ProjectLoggingSpec) {
*out = *in
in.LoggingCommonSpec.DeepCopyInto(&out.LoggingCommonSpec)
in.LoggingTargets.DeepCopyInto(&out.LoggingTargets)
in.LoggingCommonField.DeepCopyInto(&out.LoggingCommonField)
return
}
@ -6284,6 +6340,23 @@ func (in *ProjectStatus) DeepCopy() *ProjectStatus {
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ProjectTestInput) DeepCopyInto(out *ProjectTestInput) {
// Shallow-copy all value fields first, then deep-copy the embedded
// LoggingTargets so its pointer-typed target configs are not shared
// between in and out.
*out = *in
in.LoggingTargets.DeepCopyInto(&out.LoggingTargets)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ProjectTestInput.
// Returns nil when called on a nil receiver; otherwise delegates to
// DeepCopyInto for a fully independent copy.
func (in *ProjectTestInput) DeepCopy() *ProjectTestInput {
	if in == nil {
		return nil
	}
	result := &ProjectTestInput{}
	in.DeepCopyInto(result)
	return result
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *PublicEndpoint) DeepCopyInto(out *PublicEndpoint) {
*out = *in

View File

@ -5,57 +5,61 @@ import (
)
// NOTE(review): rendered diff of the generated client constants and struct
// for clusterLogging — each original declaration list is immediately
// followed by its re-aligned replacement (the commit adds
// CustomTargetConfig and ExcludeSystemComponent), so names repeat and this
// span is not compilable as-is.
const (
ClusterLoggingType = "clusterLogging"
ClusterLoggingFieldAnnotations = "annotations"
ClusterLoggingFieldAppliedSpec = "appliedSpec"
ClusterLoggingFieldClusterID = "clusterId"
ClusterLoggingFieldConditions = "conditions"
ClusterLoggingFieldCreated = "created"
ClusterLoggingFieldCreatorID = "creatorId"
ClusterLoggingFieldElasticsearchConfig = "elasticsearchConfig"
ClusterLoggingFieldFailedSpec = "failedSpec"
ClusterLoggingFieldFluentForwarderConfig = "fluentForwarderConfig"
ClusterLoggingFieldKafkaConfig = "kafkaConfig"
ClusterLoggingFieldLabels = "labels"
ClusterLoggingFieldName = "name"
ClusterLoggingFieldNamespaceId = "namespaceId"
ClusterLoggingFieldOutputFlushInterval = "outputFlushInterval"
ClusterLoggingFieldOutputTags = "outputTags"
ClusterLoggingFieldOwnerReferences = "ownerReferences"
ClusterLoggingFieldRemoved = "removed"
ClusterLoggingFieldSplunkConfig = "splunkConfig"
ClusterLoggingFieldState = "state"
ClusterLoggingFieldSyslogConfig = "syslogConfig"
ClusterLoggingFieldTransitioning = "transitioning"
ClusterLoggingFieldTransitioningMessage = "transitioningMessage"
ClusterLoggingFieldUUID = "uuid"
ClusterLoggingType = "clusterLogging"
ClusterLoggingFieldAnnotations = "annotations"
ClusterLoggingFieldAppliedSpec = "appliedSpec"
ClusterLoggingFieldClusterID = "clusterId"
ClusterLoggingFieldConditions = "conditions"
ClusterLoggingFieldCreated = "created"
ClusterLoggingFieldCreatorID = "creatorId"
ClusterLoggingFieldCustomTargetConfig = "customTargetConfig"
ClusterLoggingFieldElasticsearchConfig = "elasticsearchConfig"
ClusterLoggingFieldExcludeSystemComponent = "excludeSystemComponent"
ClusterLoggingFieldFailedSpec = "failedSpec"
ClusterLoggingFieldFluentForwarderConfig = "fluentForwarderConfig"
ClusterLoggingFieldKafkaConfig = "kafkaConfig"
ClusterLoggingFieldLabels = "labels"
ClusterLoggingFieldName = "name"
ClusterLoggingFieldNamespaceId = "namespaceId"
ClusterLoggingFieldOutputFlushInterval = "outputFlushInterval"
ClusterLoggingFieldOutputTags = "outputTags"
ClusterLoggingFieldOwnerReferences = "ownerReferences"
ClusterLoggingFieldRemoved = "removed"
ClusterLoggingFieldSplunkConfig = "splunkConfig"
ClusterLoggingFieldState = "state"
ClusterLoggingFieldSyslogConfig = "syslogConfig"
ClusterLoggingFieldTransitioning = "transitioning"
ClusterLoggingFieldTransitioningMessage = "transitioningMessage"
ClusterLoggingFieldUUID = "uuid"
)
// ClusterLogging mirrors the API's clusterLogging resource; the field list
// below likewise shows the old fields followed by the new set (adding
// CustomTargetConfig and ExcludeSystemComponent).
type ClusterLogging struct {
types.Resource
Annotations map[string]string `json:"annotations,omitempty" yaml:"annotations,omitempty"`
AppliedSpec *ClusterLoggingSpec `json:"appliedSpec,omitempty" yaml:"appliedSpec,omitempty"`
ClusterID string `json:"clusterId,omitempty" yaml:"clusterId,omitempty"`
Conditions []LoggingCondition `json:"conditions,omitempty" yaml:"conditions,omitempty"`
Created string `json:"created,omitempty" yaml:"created,omitempty"`
CreatorID string `json:"creatorId,omitempty" yaml:"creatorId,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
FailedSpec *ClusterLoggingSpec `json:"failedSpec,omitempty" yaml:"failedSpec,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
Labels map[string]string `json:"labels,omitempty" yaml:"labels,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
NamespaceId string `json:"namespaceId,omitempty" yaml:"namespaceId,omitempty"`
OutputFlushInterval int64 `json:"outputFlushInterval,omitempty" yaml:"outputFlushInterval,omitempty"`
OutputTags map[string]string `json:"outputTags,omitempty" yaml:"outputTags,omitempty"`
OwnerReferences []OwnerReference `json:"ownerReferences,omitempty" yaml:"ownerReferences,omitempty"`
Removed string `json:"removed,omitempty" yaml:"removed,omitempty"`
SplunkConfig *SplunkConfig `json:"splunkConfig,omitempty" yaml:"splunkConfig,omitempty"`
State string `json:"state,omitempty" yaml:"state,omitempty"`
SyslogConfig *SyslogConfig `json:"syslogConfig,omitempty" yaml:"syslogConfig,omitempty"`
Transitioning string `json:"transitioning,omitempty" yaml:"transitioning,omitempty"`
TransitioningMessage string `json:"transitioningMessage,omitempty" yaml:"transitioningMessage,omitempty"`
UUID string `json:"uuid,omitempty" yaml:"uuid,omitempty"`
Annotations map[string]string `json:"annotations,omitempty" yaml:"annotations,omitempty"`
AppliedSpec *ClusterLoggingSpec `json:"appliedSpec,omitempty" yaml:"appliedSpec,omitempty"`
ClusterID string `json:"clusterId,omitempty" yaml:"clusterId,omitempty"`
Conditions []LoggingCondition `json:"conditions,omitempty" yaml:"conditions,omitempty"`
Created string `json:"created,omitempty" yaml:"created,omitempty"`
CreatorID string `json:"creatorId,omitempty" yaml:"creatorId,omitempty"`
CustomTargetConfig *CustomTargetConfig `json:"customTargetConfig,omitempty" yaml:"customTargetConfig,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
ExcludeSystemComponent bool `json:"excludeSystemComponent,omitempty" yaml:"excludeSystemComponent,omitempty"`
FailedSpec *ClusterLoggingSpec `json:"failedSpec,omitempty" yaml:"failedSpec,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
Labels map[string]string `json:"labels,omitempty" yaml:"labels,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
NamespaceId string `json:"namespaceId,omitempty" yaml:"namespaceId,omitempty"`
OutputFlushInterval int64 `json:"outputFlushInterval,omitempty" yaml:"outputFlushInterval,omitempty"`
OutputTags map[string]string `json:"outputTags,omitempty" yaml:"outputTags,omitempty"`
OwnerReferences []OwnerReference `json:"ownerReferences,omitempty" yaml:"ownerReferences,omitempty"`
Removed string `json:"removed,omitempty" yaml:"removed,omitempty"`
SplunkConfig *SplunkConfig `json:"splunkConfig,omitempty" yaml:"splunkConfig,omitempty"`
State string `json:"state,omitempty" yaml:"state,omitempty"`
SyslogConfig *SyslogConfig `json:"syslogConfig,omitempty" yaml:"syslogConfig,omitempty"`
Transitioning string `json:"transitioning,omitempty" yaml:"transitioning,omitempty"`
TransitioningMessage string `json:"transitioningMessage,omitempty" yaml:"transitioningMessage,omitempty"`
UUID string `json:"uuid,omitempty" yaml:"uuid,omitempty"`
}
type ClusterLoggingCollection struct {
@ -75,6 +79,10 @@ type ClusterLoggingOperations interface {
Replace(existing *ClusterLogging) (*ClusterLogging, error)
ByID(id string) (*ClusterLogging, error)
Delete(container *ClusterLogging) error
CollectionActionDryRun(resource *ClusterLoggingCollection, input *ClusterTestInput) error
CollectionActionTest(resource *ClusterLoggingCollection, input *ClusterTestInput) error
}
func newClusterLoggingClient(apiClient *Client) *ClusterLoggingClient {
@ -127,3 +135,13 @@ func (c *ClusterLoggingClient) ByID(id string) (*ClusterLogging, error) {
// Delete removes the clusterLogging resource referenced by container via the
// generated API client's resource-delete operation.
func (c *ClusterLoggingClient) Delete(container *ClusterLogging) error {
return c.apiClient.Ops.DoResourceDelete(ClusterLoggingType, &container.Resource)
}
// CollectionActionDryRun invokes the collection-level "dryRun" action on
// clusterLogging with the supplied test input.
func (c *ClusterLoggingClient) CollectionActionDryRun(resource *ClusterLoggingCollection, input *ClusterTestInput) error {
	return c.apiClient.Ops.DoCollectionAction(ClusterLoggingType, "dryRun", &resource.Collection, input, nil)
}
// CollectionActionTest invokes the collection-level "test" action on
// clusterLogging with the supplied test input.
func (c *ClusterLoggingClient) CollectionActionTest(resource *ClusterLoggingCollection, input *ClusterTestInput) error {
	return c.apiClient.Ops.DoCollectionAction(ClusterLoggingType, "test", &resource.Collection, input, nil)
}

View File

@ -1,26 +1,30 @@
package client
// NOTE(review): rendered diff of the generated client clusterLoggingSpec
// declarations — old constant/field lists are immediately followed by their
// re-aligned replacements (adding CustomTargetConfig and
// ExcludeSystemComponent), so names repeat and this span is not compilable
// as-is.
const (
ClusterLoggingSpecType = "clusterLoggingSpec"
ClusterLoggingSpecFieldClusterID = "clusterId"
ClusterLoggingSpecFieldDisplayName = "displayName"
ClusterLoggingSpecFieldElasticsearchConfig = "elasticsearchConfig"
ClusterLoggingSpecFieldFluentForwarderConfig = "fluentForwarderConfig"
ClusterLoggingSpecFieldKafkaConfig = "kafkaConfig"
ClusterLoggingSpecFieldOutputFlushInterval = "outputFlushInterval"
ClusterLoggingSpecFieldOutputTags = "outputTags"
ClusterLoggingSpecFieldSplunkConfig = "splunkConfig"
ClusterLoggingSpecFieldSyslogConfig = "syslogConfig"
ClusterLoggingSpecType = "clusterLoggingSpec"
ClusterLoggingSpecFieldClusterID = "clusterId"
ClusterLoggingSpecFieldCustomTargetConfig = "customTargetConfig"
ClusterLoggingSpecFieldDisplayName = "displayName"
ClusterLoggingSpecFieldElasticsearchConfig = "elasticsearchConfig"
ClusterLoggingSpecFieldExcludeSystemComponent = "excludeSystemComponent"
ClusterLoggingSpecFieldFluentForwarderConfig = "fluentForwarderConfig"
ClusterLoggingSpecFieldKafkaConfig = "kafkaConfig"
ClusterLoggingSpecFieldOutputFlushInterval = "outputFlushInterval"
ClusterLoggingSpecFieldOutputTags = "outputTags"
ClusterLoggingSpecFieldSplunkConfig = "splunkConfig"
ClusterLoggingSpecFieldSyslogConfig = "syslogConfig"
)
type ClusterLoggingSpec struct {
ClusterID string `json:"clusterId,omitempty" yaml:"clusterId,omitempty"`
DisplayName string `json:"displayName,omitempty" yaml:"displayName,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
OutputFlushInterval int64 `json:"outputFlushInterval,omitempty" yaml:"outputFlushInterval,omitempty"`
OutputTags map[string]string `json:"outputTags,omitempty" yaml:"outputTags,omitempty"`
SplunkConfig *SplunkConfig `json:"splunkConfig,omitempty" yaml:"splunkConfig,omitempty"`
SyslogConfig *SyslogConfig `json:"syslogConfig,omitempty" yaml:"syslogConfig,omitempty"`
ClusterID string `json:"clusterId,omitempty" yaml:"clusterId,omitempty"`
CustomTargetConfig *CustomTargetConfig `json:"customTargetConfig,omitempty" yaml:"customTargetConfig,omitempty"`
DisplayName string `json:"displayName,omitempty" yaml:"displayName,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
ExcludeSystemComponent bool `json:"excludeSystemComponent,omitempty" yaml:"excludeSystemComponent,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
OutputFlushInterval int64 `json:"outputFlushInterval,omitempty" yaml:"outputFlushInterval,omitempty"`
OutputTags map[string]string `json:"outputTags,omitempty" yaml:"outputTags,omitempty"`
SplunkConfig *SplunkConfig `json:"splunkConfig,omitempty" yaml:"splunkConfig,omitempty"`
SyslogConfig *SyslogConfig `json:"syslogConfig,omitempty" yaml:"syslogConfig,omitempty"`
}

View File

@ -0,0 +1,22 @@
package client
// Wire-name constants for the clusterTestInput type, used by the generated
// client to address fields by their JSON names.
const (
ClusterTestInputType = "clusterTestInput"
ClusterTestInputFieldClusterName = "clusterId"
ClusterTestInputFieldCustomTargetConfig = "customTargetConfig"
ClusterTestInputFieldElasticsearchConfig = "elasticsearchConfig"
ClusterTestInputFieldFluentForwarderConfig = "fluentForwarderConfig"
ClusterTestInputFieldKafkaConfig = "kafkaConfig"
ClusterTestInputFieldSplunkConfig = "splunkConfig"
ClusterTestInputFieldSyslogConfig = "syslogConfig"
)
// ClusterTestInput is the request body for the clusterLogging "test" and
// "dryRun" collection actions. Note that ClusterName serializes as
// "clusterId" (see its json/yaml tags), matching
// ClusterTestInputFieldClusterName above.
type ClusterTestInput struct {
ClusterName string `json:"clusterId,omitempty" yaml:"clusterId,omitempty"`
CustomTargetConfig *CustomTargetConfig `json:"customTargetConfig,omitempty" yaml:"customTargetConfig,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
SplunkConfig *SplunkConfig `json:"splunkConfig,omitempty" yaml:"splunkConfig,omitempty"`
SyslogConfig *SyslogConfig `json:"syslogConfig,omitempty" yaml:"syslogConfig,omitempty"`
}

View File

@ -0,0 +1,10 @@
package client
// Wire-name constants for the customTargetConfig type.
const (
CustomTargetConfigType = "customTargetConfig"
CustomTargetConfigFieldContent = "content"
)
// CustomTargetConfig describes a user-defined logging target. Content holds
// the raw target configuration text — NOTE(review): its exact format is not
// visible in this chunk (presumably a fluentd output snippet); confirm
// against the logging controller.
type CustomTargetConfig struct {
Content string `json:"content,omitempty" yaml:"content,omitempty"`
}

View File

@ -9,6 +9,7 @@ const (
ProjectLoggingFieldAnnotations = "annotations"
ProjectLoggingFieldCreated = "created"
ProjectLoggingFieldCreatorID = "creatorId"
ProjectLoggingFieldCustomTargetConfig = "customTargetConfig"
ProjectLoggingFieldElasticsearchConfig = "elasticsearchConfig"
ProjectLoggingFieldFluentForwarderConfig = "fluentForwarderConfig"
ProjectLoggingFieldKafkaConfig = "kafkaConfig"
@ -34,6 +35,7 @@ type ProjectLogging struct {
Annotations map[string]string `json:"annotations,omitempty" yaml:"annotations,omitempty"`
Created string `json:"created,omitempty" yaml:"created,omitempty"`
CreatorID string `json:"creatorId,omitempty" yaml:"creatorId,omitempty"`
CustomTargetConfig *CustomTargetConfig `json:"customTargetConfig,omitempty" yaml:"customTargetConfig,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
@ -71,6 +73,10 @@ type ProjectLoggingOperations interface {
Replace(existing *ProjectLogging) (*ProjectLogging, error)
ByID(id string) (*ProjectLogging, error)
Delete(container *ProjectLogging) error
CollectionActionDryRun(resource *ProjectLoggingCollection, input *ProjectTestInput) error
CollectionActionTest(resource *ProjectLoggingCollection, input *ProjectTestInput) error
}
func newProjectLoggingClient(apiClient *Client) *ProjectLoggingClient {
@ -123,3 +129,13 @@ func (c *ProjectLoggingClient) ByID(id string) (*ProjectLogging, error) {
// Delete removes the projectLogging resource referenced by container via the
// generated API client's resource-delete operation.
func (c *ProjectLoggingClient) Delete(container *ProjectLogging) error {
return c.apiClient.Ops.DoResourceDelete(ProjectLoggingType, &container.Resource)
}
// CollectionActionDryRun invokes the collection-level "dryRun" action on
// projectLogging with the supplied test input.
func (c *ProjectLoggingClient) CollectionActionDryRun(resource *ProjectLoggingCollection, input *ProjectTestInput) error {
	return c.apiClient.Ops.DoCollectionAction(ProjectLoggingType, "dryRun", &resource.Collection, input, nil)
}
// CollectionActionTest invokes the collection-level "test" action on
// projectLogging with the supplied test input.
func (c *ProjectLoggingClient) CollectionActionTest(resource *ProjectLoggingCollection, input *ProjectTestInput) error {
	return c.apiClient.Ops.DoCollectionAction(ProjectLoggingType, "test", &resource.Collection, input, nil)
}

View File

@ -2,6 +2,7 @@ package client
const (
ProjectLoggingSpecType = "projectLoggingSpec"
ProjectLoggingSpecFieldCustomTargetConfig = "customTargetConfig"
ProjectLoggingSpecFieldDisplayName = "displayName"
ProjectLoggingSpecFieldElasticsearchConfig = "elasticsearchConfig"
ProjectLoggingSpecFieldFluentForwarderConfig = "fluentForwarderConfig"
@ -14,6 +15,7 @@ const (
)
type ProjectLoggingSpec struct {
CustomTargetConfig *CustomTargetConfig `json:"customTargetConfig,omitempty" yaml:"customTargetConfig,omitempty"`
DisplayName string `json:"displayName,omitempty" yaml:"displayName,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`

View File

@ -0,0 +1,22 @@
package client
// Wire-name constants for the projectTestInput type, used by the generated
// client to address fields by their JSON names.
const (
ProjectTestInputType = "projectTestInput"
ProjectTestInputFieldCustomTargetConfig = "customTargetConfig"
ProjectTestInputFieldElasticsearchConfig = "elasticsearchConfig"
ProjectTestInputFieldFluentForwarderConfig = "fluentForwarderConfig"
ProjectTestInputFieldKafkaConfig = "kafkaConfig"
ProjectTestInputFieldProjectName = "projectId"
ProjectTestInputFieldSplunkConfig = "splunkConfig"
ProjectTestInputFieldSyslogConfig = "syslogConfig"
)
// ProjectTestInput is the request body for the projectLogging "test" and
// "dryRun" collection actions. Note that ProjectName serializes as
// "projectId" (see its json/yaml tags), matching
// ProjectTestInputFieldProjectName above.
type ProjectTestInput struct {
CustomTargetConfig *CustomTargetConfig `json:"customTargetConfig,omitempty" yaml:"customTargetConfig,omitempty"`
ElasticsearchConfig *ElasticsearchConfig `json:"elasticsearchConfig,omitempty" yaml:"elasticsearchConfig,omitempty"`
FluentForwarderConfig *FluentForwarderConfig `json:"fluentForwarderConfig,omitempty" yaml:"fluentForwarderConfig,omitempty"`
KafkaConfig *KafkaConfig `json:"kafkaConfig,omitempty" yaml:"kafkaConfig,omitempty"`
ProjectName string `json:"projectId,omitempty" yaml:"projectId,omitempty"`
SplunkConfig *SplunkConfig `json:"splunkConfig,omitempty" yaml:"splunkConfig,omitempty"`
SyslogConfig *SyslogConfig `json:"syslogConfig,omitempty" yaml:"syslogConfig,omitempty"`
}