// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/cloud/automl/v1beta1/model_evaluation.proto

package automl

import (
	fmt "fmt"
	math "math"

	proto "github.com/golang/protobuf/proto"
	timestamp "github.com/golang/protobuf/ptypes/timestamp"
	_ "google.golang.org/genproto/googleapis/api/annotations"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Evaluation results of a model.
type ModelEvaluation struct {
	// Output only. Problem type specific evaluation metrics.
	//
	// Types that are valid to be assigned to Metrics:
	//	*ModelEvaluation_ClassificationEvaluationMetrics
	//	*ModelEvaluation_RegressionEvaluationMetrics
	//	*ModelEvaluation_TranslationEvaluationMetrics
	//	*ModelEvaluation_ImageObjectDetectionEvaluationMetrics
	//	*ModelEvaluation_VideoObjectTrackingEvaluationMetrics
	//	*ModelEvaluation_TextSentimentEvaluationMetrics
	//	*ModelEvaluation_TextExtractionEvaluationMetrics
	Metrics isModelEvaluation_Metrics `protobuf_oneof:"metrics"`
	// Output only. Resource name of the model evaluation.
	// Format:
	//
	// `projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Output only. The ID of the annotation spec that the model evaluation applies to.
	// The ID is empty for the overall model evaluation.
	// For Tables, annotation specs in the dataset do not exist and this ID is
	// never set, but for CLASSIFICATION
	//
	// [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]
	// the
	// [display_name][google.cloud.automl.v1beta1.ModelEvaluation.display_name]
	// field is used.
	AnnotationSpecId string `protobuf:"bytes,2,opt,name=annotation_spec_id,json=annotationSpecId,proto3" json:"annotation_spec_id,omitempty"`
	// Output only. The value of
	// [display_name][google.cloud.automl.v1beta1.AnnotationSpec.display_name] at
	// the moment when the model was trained. Because this field returns a value
	// at model training time, for different models trained from the same dataset,
	// the values may differ, since display names could have been changed between
	// the two models' trainings.
	// For Tables CLASSIFICATION
	//
	// [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]
	// distinct values of the target column at the moment of the model evaluation
	// are populated here.
	// The display_name is empty for the overall model evaluation.
	DisplayName string `protobuf:"bytes,15,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"`
	// Output only. Timestamp when this model evaluation was created.
	CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
	// Output only. The number of examples used for model evaluation, i.e. for
	// which ground truth from time of model creation is compared against the
	// predicted annotations created by the model.
	// For overall ModelEvaluation (i.e. with annotation_spec_id not set) this is
	// the total number of all examples used for evaluation.
	// Otherwise, this is the count of examples that according to the ground
	// truth were annotated by the
	//
	// [annotation_spec_id][google.cloud.automl.v1beta1.ModelEvaluation.annotation_spec_id].
	EvaluatedExampleCount int32    `protobuf:"varint,6,opt,name=evaluated_example_count,json=evaluatedExampleCount,proto3" json:"evaluated_example_count,omitempty"`
	XXX_NoUnkeyedLiteral  struct{} `json:"-"`
	XXX_unrecognized      []byte   `json:"-"`
	XXX_sizecache         int32    `json:"-"`
}
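
// exampleNewModelEvaluation is an illustrative, hand-written sketch (not part
// of the generated code): it shows how the Metrics oneof is populated by
// assigning one of the wrapper structs declared further below. The resource
// IDs in Name and the empty TranslationEvaluationMetrics literal are
// placeholders for this example only.
func exampleNewModelEvaluation() *ModelEvaluation {
	return &ModelEvaluation{
		Name: "projects/my-project/locations/us-central1/models/my-model/modelEvaluations/my-eval",
		Metrics: &ModelEvaluation_TranslationEvaluationMetrics{
			TranslationEvaluationMetrics: &TranslationEvaluationMetrics{},
		},
	}
}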

func (m *ModelEvaluation) Reset()         { *m = ModelEvaluation{} }
func (m *ModelEvaluation) String() string { return proto.CompactTextString(m) }
func (*ModelEvaluation) ProtoMessage()    {}
func (*ModelEvaluation) Descriptor() ([]byte, []int) {
	return fileDescriptor_2d3cea51cfd5443a, []int{0}
}

func (m *ModelEvaluation) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ModelEvaluation.Unmarshal(m, b)
}
func (m *ModelEvaluation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ModelEvaluation.Marshal(b, m, deterministic)
}
func (m *ModelEvaluation) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ModelEvaluation.Merge(m, src)
}
func (m *ModelEvaluation) XXX_Size() int {
	return xxx_messageInfo_ModelEvaluation.Size(m)
}
func (m *ModelEvaluation) XXX_DiscardUnknown() {
	xxx_messageInfo_ModelEvaluation.DiscardUnknown(m)
}

var xxx_messageInfo_ModelEvaluation proto.InternalMessageInfo

type isModelEvaluation_Metrics interface {
	isModelEvaluation_Metrics()
}

type ModelEvaluation_ClassificationEvaluationMetrics struct {
	ClassificationEvaluationMetrics *ClassificationEvaluationMetrics `protobuf:"bytes,8,opt,name=classification_evaluation_metrics,json=classificationEvaluationMetrics,proto3,oneof"`
}

type ModelEvaluation_RegressionEvaluationMetrics struct {
	RegressionEvaluationMetrics *RegressionEvaluationMetrics `protobuf:"bytes,24,opt,name=regression_evaluation_metrics,json=regressionEvaluationMetrics,proto3,oneof"`
}

type ModelEvaluation_TranslationEvaluationMetrics struct {
	TranslationEvaluationMetrics *TranslationEvaluationMetrics `protobuf:"bytes,9,opt,name=translation_evaluation_metrics,json=translationEvaluationMetrics,proto3,oneof"`
}

type ModelEvaluation_ImageObjectDetectionEvaluationMetrics struct {
	ImageObjectDetectionEvaluationMetrics *ImageObjectDetectionEvaluationMetrics `protobuf:"bytes,12,opt,name=image_object_detection_evaluation_metrics,json=imageObjectDetectionEvaluationMetrics,proto3,oneof"`
}

type ModelEvaluation_VideoObjectTrackingEvaluationMetrics struct {
	VideoObjectTrackingEvaluationMetrics *VideoObjectTrackingEvaluationMetrics `protobuf:"bytes,14,opt,name=video_object_tracking_evaluation_metrics,json=videoObjectTrackingEvaluationMetrics,proto3,oneof"`
}

type ModelEvaluation_TextSentimentEvaluationMetrics struct {
	TextSentimentEvaluationMetrics *TextSentimentEvaluationMetrics `protobuf:"bytes,11,opt,name=text_sentiment_evaluation_metrics,json=textSentimentEvaluationMetrics,proto3,oneof"`
}

type ModelEvaluation_TextExtractionEvaluationMetrics struct {
	TextExtractionEvaluationMetrics *TextExtractionEvaluationMetrics `protobuf:"bytes,13,opt,name=text_extraction_evaluation_metrics,json=textExtractionEvaluationMetrics,proto3,oneof"`
}

func (*ModelEvaluation_ClassificationEvaluationMetrics) isModelEvaluation_Metrics() {}

func (*ModelEvaluation_RegressionEvaluationMetrics) isModelEvaluation_Metrics() {}

func (*ModelEvaluation_TranslationEvaluationMetrics) isModelEvaluation_Metrics() {}

func (*ModelEvaluation_ImageObjectDetectionEvaluationMetrics) isModelEvaluation_Metrics() {}

func (*ModelEvaluation_VideoObjectTrackingEvaluationMetrics) isModelEvaluation_Metrics() {}

func (*ModelEvaluation_TextSentimentEvaluationMetrics) isModelEvaluation_Metrics() {}

func (*ModelEvaluation_TextExtractionEvaluationMetrics) isModelEvaluation_Metrics() {}

func (m *ModelEvaluation) GetMetrics() isModelEvaluation_Metrics {
	if m != nil {
		return m.Metrics
	}
	return nil
}

func (m *ModelEvaluation) GetClassificationEvaluationMetrics() *ClassificationEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_ClassificationEvaluationMetrics); ok {
		return x.ClassificationEvaluationMetrics
	}
	return nil
}

func (m *ModelEvaluation) GetRegressionEvaluationMetrics() *RegressionEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_RegressionEvaluationMetrics); ok {
		return x.RegressionEvaluationMetrics
	}
	return nil
}

func (m *ModelEvaluation) GetTranslationEvaluationMetrics() *TranslationEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_TranslationEvaluationMetrics); ok {
		return x.TranslationEvaluationMetrics
	}
	return nil
}

func (m *ModelEvaluation) GetImageObjectDetectionEvaluationMetrics() *ImageObjectDetectionEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_ImageObjectDetectionEvaluationMetrics); ok {
		return x.ImageObjectDetectionEvaluationMetrics
	}
	return nil
}

func (m *ModelEvaluation) GetVideoObjectTrackingEvaluationMetrics() *VideoObjectTrackingEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_VideoObjectTrackingEvaluationMetrics); ok {
		return x.VideoObjectTrackingEvaluationMetrics
	}
	return nil
}

func (m *ModelEvaluation) GetTextSentimentEvaluationMetrics() *TextSentimentEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_TextSentimentEvaluationMetrics); ok {
		return x.TextSentimentEvaluationMetrics
	}
	return nil
}

func (m *ModelEvaluation) GetTextExtractionEvaluationMetrics() *TextExtractionEvaluationMetrics {
	if x, ok := m.GetMetrics().(*ModelEvaluation_TextExtractionEvaluationMetrics); ok {
		return x.TextExtractionEvaluationMetrics
	}
	return nil
}
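
// exampleDescribeMetrics is an illustrative, hand-written sketch (not part of
// the generated code): rather than probing each typed getter above in turn, a
// caller can type-switch on the value returned by GetMetrics to find out which
// variant of the oneof is set. The function name and the returned labels are
// assumptions made for this example only.
func exampleDescribeMetrics(m *ModelEvaluation) string {
	switch m.GetMetrics().(type) {
	case *ModelEvaluation_ClassificationEvaluationMetrics:
		return "classification"
	case *ModelEvaluation_RegressionEvaluationMetrics:
		return "regression"
	case *ModelEvaluation_TranslationEvaluationMetrics:
		return "translation"
	case *ModelEvaluation_ImageObjectDetectionEvaluationMetrics:
		return "image object detection"
	case *ModelEvaluation_VideoObjectTrackingEvaluationMetrics:
		return "video object tracking"
	case *ModelEvaluation_TextSentimentEvaluationMetrics:
		return "text sentiment"
	case *ModelEvaluation_TextExtractionEvaluationMetrics:
		return "text extraction"
	default:
		return "unknown or unset"
	}
}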

func (m *ModelEvaluation) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *ModelEvaluation) GetAnnotationSpecId() string {
	if m != nil {
		return m.AnnotationSpecId
	}
	return ""
}

func (m *ModelEvaluation) GetDisplayName() string {
	if m != nil {
		return m.DisplayName
	}
	return ""
}

func (m *ModelEvaluation) GetCreateTime() *timestamp.Timestamp {
	if m != nil {
		return m.CreateTime
	}
	return nil
}
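
// exampleCreateTimeSeconds is an illustrative, hand-written sketch (not part
// of the generated code): CreateTime is a protobuf Timestamp, so the raw epoch
// seconds are available directly via GetSeconds; callers that want a time.Time
// would typically convert the value with a helper such as ptypes.Timestamp
// (not imported in this file). The function name is an assumption made for
// this example only.
func exampleCreateTimeSeconds(m *ModelEvaluation) int64 {
	if ts := m.GetCreateTime(); ts != nil {
		return ts.GetSeconds()
	}
	return 0
}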

func (m *ModelEvaluation) GetEvaluatedExampleCount() int32 {
	if m != nil {
		return m.EvaluatedExampleCount
	}
	return 0
}

// XXX_OneofWrappers is for the internal use of the proto package.
func (*ModelEvaluation) XXX_OneofWrappers() []interface{} {
	return []interface{}{
		(*ModelEvaluation_ClassificationEvaluationMetrics)(nil),
		(*ModelEvaluation_RegressionEvaluationMetrics)(nil),
		(*ModelEvaluation_TranslationEvaluationMetrics)(nil),
		(*ModelEvaluation_ImageObjectDetectionEvaluationMetrics)(nil),
		(*ModelEvaluation_VideoObjectTrackingEvaluationMetrics)(nil),
		(*ModelEvaluation_TextSentimentEvaluationMetrics)(nil),
		(*ModelEvaluation_TextExtractionEvaluationMetrics)(nil),
	}
}

func init() {
	proto.RegisterType((*ModelEvaluation)(nil), "google.cloud.automl.v1beta1.ModelEvaluation")
}

func init() {
	proto.RegisterFile("google/cloud/automl/v1beta1/model_evaluation.proto", fileDescriptor_2d3cea51cfd5443a)
}

var fileDescriptor_2d3cea51cfd5443a = []byte{
	// 722 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x95, 0xdf, 0x6a, 0xd4, 0x4e,
	0x14, 0xc7, 0x7f, 0x53, 0x7e, 0xad, 0x76, 0xb6, 0x5a, 0x19, 0x10, 0xd7, 0x6d, 0xed, 0x3f, 0x2c,
	0xac, 0x58, 0x93, 0xb6, 0x82, 0xe8, 0xd6, 0x9b, 0x6d, 0x2d, 0x5a, 0xb0, 0x2a, 0xdb, 0xd2, 0x8b,
	0x52, 0x08, 0xb3, 0x93, 0xd3, 0x10, 0x4d, 0x32, 0x21, 0x33, 0x59, 0x56, 0x4a, 0x05, 0xf1, 0x42,
	0xd1, 0x47, 0x10, 0xbc, 0xf3, 0x65, 0x7c, 0x13, 0xfb, 0x14, 0x92, 0xc9, 0x64, 0x93, 0x96, 0x38,
	0xf6, 0x6a, 0x93, 0x3d, 0xdf, 0xf3, 0x9d, 0xcf, 0x9c, 0x33, 0x73, 0x82, 0xd7, 0x3d, 0xce, 0xbd,
	0x00, 0x6c, 0x16, 0xf0, 0xd4, 0xb5, 0x69, 0x2a, 0x79, 0x18, 0xd8, 0x83, 0xb5, 0x3e, 0x48, 0xba,
	0x66, 0x87, 0xdc, 0x85, 0xc0, 0x81, 0x01, 0x0d, 0x52, 0x2a, 0x7d, 0x1e, 0x59, 0x71, 0xc2, 0x25,
	0x27, 0x33, 0x79, 0x8e, 0xa5, 0x72, 0xac, 0x3c, 0xc7, 0xd2, 0x39, 0xad, 0xdb, 0xda, 0x90, 0xc6,
	0xbe, 0x9d, 0x80, 0xe0, 0x69, 0xc2, 0x20, 0xcf, 0x6b, 0xad, 0x9a, 0xd6, 0x62, 0x01, 0x15, 0xc2,
	0x3f, 0xf6, 0x59, 0x65, 0xa5, 0xd6, 0x7d, 0x53, 0x86, 0x0b, 0x12, 0x58, 0x45, 0xbc, 0x62, 0x12,
	0x27, 0xe0, 0x25, 0x20, 0x44, 0xa9, 0x6e, 0x9b, 0xd4, 0x92, 0xf6, 0x03, 0x10, 0x5a, 0xb9, 0x66,
	0x54, 0xc2, 0x50, 0x3a, 0x30, 0x94, 0x09, 0xad, 0xa2, 0xac, 0xfe, 0x33, 0x45, 0x40, 0x24, 0xfd,
	0x10, 0x22, 0xa9, 0x33, 0x1e, 0x18, 0x33, 0x12, 0x1a, 0x89, 0xa0, 0x5a, 0x98, 0x79, 0x2d, 0x57,
	0x6f, 0xfd, 0xf4, 0xd8, 0xce, 0xcc, 0x84, 0xa4, 0x61, 0xac, 0x05, 0xb3, 0x95, 0x36, 0xd0, 0x28,
	0xe2, 0x52, 0x65, 0xeb, 0x2d, 0x2d, 0xfd, 0xc6, 0x78, 0x7a, 0x37, 0x6b, 0xee, 0xf6, 0xa8, 0xb7,
	0xe4, 0x2b, 0xc2, 0x8b, 0xe7, 0x9b, 0x50, 0xe9, 0xbc, 0x13, 0x82, 0x4c, 0x7c, 0x26, 0x9a, 0x57,
	0x17, 0x50, 0xbb, 0xb1, 0xfe, 0xd4, 0x32, 0x1c, 0x01, 0x6b, 0xeb, 0x9c, 0x4b, 0xb9, 0xc4, 0x6e,
	0xee, 0xf1, 0xe2, 0xbf, 0xde, 0x3c, 0x33, 0x4b, 0xc8, 0x07, 0x7c, 0xa7, 0xec, 0x58, 0x1d, 0x47,
	0x53, 0x71, 0x3c, 0x36, 0x72, 0xf4, 0x46, 0x0e, 0x75, 0x0c, 0x33, 0xc9, 0xdf, 0xc3, 0xe4, 0x23,
	0xc2, 0x73, 0x95, 0xaa, 0xd7, 0x11, 0x4c, 0x2a, 0x82, 0x27, 0x46, 0x82, 0xfd, 0xd2, 0xa2, 0x0e,
	0x61, 0x56, 0x1a, 0xe2, 0xe4, 0x07, 0xc2, 0xf7, 0xfc, 0x90, 0x7a, 0xe0, 0xf0, 0xfe, 0x5b, 0x60,
	0xd2, 0x19, 0x1d, 0xf8, 0x3a, 0x9c, 0x29, 0x85, 0xb3, 0x69, 0xc4, 0xd9, 0xc9, 0xdc, 0x5e, 0x2b,
	0xb3, 0x67, 0x85, 0x57, 0x1d, 0xd7, 0xb2, 0x7f, 0x19, 0x21, 0xf9, 0x8e, 0x70, 0x7b, 0xe0, 0xbb,
	0xc0, 0x0b, 0xc0, 0xec, 0x16, 0xbc, 0xf3, 0x23, 0xaf, 0x8e, 0xef, 0xba, 0xe2, 0xeb, 0x1a, 0xf9,
	0x0e, 0x32, 0xb3, 0x7c, 0xd9, 0x7d, 0x6d, 0x55, 0x87, 0x77, 0x77, 0x70, 0x09, 0x1d, 0xf9, 0x82,
	0xf0, 0xe2, 0xf9, 0xab, 0x56, 0x87, 0xd5, 0x50, 0x58, 0x1b, 0xe6, 0x2e, 0xc2, 0x50, 0xee, 0x15,
	0x26, 0x75, 0x40, 0x73, 0xd2, 0xa8, 0x20, 0xdf, 0x10, 0x5e, 0xba, 0x30, 0x28, 0xea, 0x58, 0xae,
	0x5d, 0xe2, 0x6e, 0x65, 0x2c, 0xdb, 0x23, 0x97, 0xda, 0xbb, 0x25, 0xcd, 0x12, 0x42, 0xf0, 0xff,
	0x11, 0x0d, 0xa1, 0x89, 0x16, 0x50, 0x7b, 0xb2, 0xa7, 0x9e, 0xc9, 0x0a, 0x26, 0xe5, 0x94, 0x70,
	0x44, 0x0c, 0xcc, 0xf1, 0xdd, 0xe6, 0x98, 0x52, 0xdc, 0x28, 0x23, 0x7b, 0x31, 0xb0, 0x1d, 0x97,
	0x2c, 0xe2, 0x29, 0xd7, 0x17, 0x71, 0x40, 0xdf, 0x3b, 0xca, 0x69, 0x5a, 0xe9, 0x1a, 0xfa, 0xbf,
	0x57, 0x99, 0xe1, 0x06, 0x6e, 0xb0, 0x04, 0xa8, 0x04, 0x27, 0x2b, 0x4a, 0x73, 0x5c, 0x6d, 0xad,
	0x55, 0x6c, 0xad, 0x18, 0x5b, 0xd6, 0x7e, 0x31, 0xb6, 0x7a, 0x38, 0x97, 0x67, 0x7f, 0x90, 0x47,
	0xf8, 0x96, 0x2e, 0x0f, 0xb8, 0x0e, 0x0c, 0x69, 0x18, 0x07, 0xe0, 0x30, 0x9e, 0x46, 0xb2, 0x39,
	0xb1, 0x80, 0xda, 0xe3, 0xbd, 0x9b, 0xa3, 0xf0, 0x76, 0x1e, 0xdd, 0xca, 0x82, 0x9d, 0xcf, 0xe8,
	0xac, 0xfb, 0x09, 0xe1, 0x65, 0x5d, 0xb3, 0x7c, 0x31, 0x1a, 0xfb, 0xc2, 0x62, 0x3c, 0xb4, 0x2f,
	0x0e, 0xbc, 0xc3, 0x38, 0xe1, 0xd9, 0x11, 0x12, 0xf6, 0x89, 0x7e, 0x3a, 0xb5, 0x03, 0x9e, 0x4f,
	0x24, 0x61, 0x9f, 0x14, 0x8f, 0xa7, 0xf9, 0x67, 0x50, 0xd8, 0x27, 0xea, 0x57, 0xbf, 0x96, 0x3e,
	0x45, 0xa0, 0xd2, 0xd1, 0xd3, 0xcd, 0x49, 0x7c, 0x45, 0x77, 0x75, 0xf3, 0x27, 0xc2, 0xf3, 0x8c,
	0x87, 0xa6, 0xae, 0xbe, 0x41, 0x87, 0x5d, 0x1d, 0xf6, 0x78, 0x40, 0x23, 0xcf, 0xe2, 0x89, 0x67,
	0x7b, 0x10, 0xa9, 0x3a, 0xd9, 0xe5, 0x3e, 0x6a, 0x3f, 0x0f, 0x1b, 0xf9, 0xeb, 0xaf, 0xb1, 0x99,
	0xe7, 0x4a, 0x78, 0xb4, 0x95, 0x89, 0x8e, 0xba, 0xa9, 0xe4, 0xbb, 0xc1, 0xd1, 0x41, 0x2e, 0x3a,
	0x1b, 0x9b, 0xcb, 0xa3, 0x9d, 0x8e, 0x0a, 0x77, 0x3a, 0x2a, 0xfe, 0xb2, 0xd3, 0xd1, 0x82, 0xfe,
	0x84, 0x5a, 0xec, 0xe1, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x25, 0x4e, 0xd5, 0xf7, 0x12, 0x08,
	0x00, 0x00,
}