// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/cloud/videointelligence/v1beta2/video_intelligence.proto

/*
Package videointelligence is a generated protocol buffer package.

It is generated from these files:
	google/cloud/videointelligence/v1beta2/video_intelligence.proto

It has these top-level messages:
	AnnotateVideoRequest
	VideoContext
	LabelDetectionConfig
	ShotChangeDetectionConfig
	ExplicitContentDetectionConfig
	FaceDetectionConfig
	VideoSegment
	LabelSegment
	LabelFrame
	Entity
	LabelAnnotation
	ExplicitContentFrame
	ExplicitContentAnnotation
	NormalizedBoundingBox
	FaceSegment
	FaceFrame
	FaceAnnotation
	VideoAnnotationResults
	AnnotateVideoResponse
	VideoAnnotationProgress
	AnnotateVideoProgress
*/
package videointelligence

import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "google.golang.org/genproto/googleapis/api/annotations"
import google_longrunning "google.golang.org/genproto/googleapis/longrunning"
import google_protobuf3 "github.com/golang/protobuf/ptypes/duration"
import google_protobuf4 "github.com/golang/protobuf/ptypes/timestamp"
import google_rpc "google.golang.org/genproto/googleapis/rpc/status"

import (
	context "golang.org/x/net/context"
	grpc "google.golang.org/grpc"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package

// Video annotation feature.
type Feature int32

const (
	// Unspecified.
	Feature_FEATURE_UNSPECIFIED Feature = 0
	// Label detection. Detect objects, such as dog or flower.
	Feature_LABEL_DETECTION Feature = 1
	// Shot change detection.
	Feature_SHOT_CHANGE_DETECTION Feature = 2
	// Explicit content detection.
	Feature_EXPLICIT_CONTENT_DETECTION Feature = 3
	// Human face detection and tracking.
	Feature_FACE_DETECTION Feature = 4
)

var Feature_name = map[int32]string{
	0: "FEATURE_UNSPECIFIED",
	1: "LABEL_DETECTION",
	2: "SHOT_CHANGE_DETECTION",
	3: "EXPLICIT_CONTENT_DETECTION",
	4: "FACE_DETECTION",
}
var Feature_value = map[string]int32{
	"FEATURE_UNSPECIFIED":        0,
	"LABEL_DETECTION":            1,
	"SHOT_CHANGE_DETECTION":      2,
	"EXPLICIT_CONTENT_DETECTION": 3,
	"FACE_DETECTION":             4,
}

func (x Feature) String() string {
	return proto.EnumName(Feature_name, int32(x))
}
func (Feature) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }

// Label detection mode.
type LabelDetectionMode int32

const (
	// Unspecified.
	LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0
	// Detect shot-level labels.
	LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1
	// Detect frame-level labels.
	LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2
	// Detect both shot-level and frame-level labels.
	LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3
)

var LabelDetectionMode_name = map[int32]string{
	0: "LABEL_DETECTION_MODE_UNSPECIFIED",
	1: "SHOT_MODE",
	2: "FRAME_MODE",
	3: "SHOT_AND_FRAME_MODE",
}
var LabelDetectionMode_value = map[string]int32{
	"LABEL_DETECTION_MODE_UNSPECIFIED": 0,
	"SHOT_MODE":                        1,
	"FRAME_MODE":                       2,
	"SHOT_AND_FRAME_MODE":              3,
}

func (x LabelDetectionMode) String() string {
	return proto.EnumName(LabelDetectionMode_name, int32(x))
}
func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }

// Bucketized representation of likelihood.
type Likelihood int32

const (
	// Unspecified likelihood.
	Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0
	// Very unlikely.
	Likelihood_VERY_UNLIKELY Likelihood = 1
	// Unlikely.
	Likelihood_UNLIKELY Likelihood = 2
	// Possible.
	Likelihood_POSSIBLE Likelihood = 3
	// Likely.
	Likelihood_LIKELY Likelihood = 4
	// Very likely.
	Likelihood_VERY_LIKELY Likelihood = 5
)

var Likelihood_name = map[int32]string{
	0: "LIKELIHOOD_UNSPECIFIED",
	1: "VERY_UNLIKELY",
	2: "UNLIKELY",
	3: "POSSIBLE",
	4: "LIKELY",
	5: "VERY_LIKELY",
}
var Likelihood_value = map[string]int32{
	"LIKELIHOOD_UNSPECIFIED": 0,
	"VERY_UNLIKELY":          1,
	"UNLIKELY":               2,
	"POSSIBLE":               3,
	"LIKELY":                 4,
	"VERY_LIKELY":            5,
}

func (x Likelihood) String() string {
	return proto.EnumName(Likelihood_name, int32(x))
}
func (Likelihood) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }

// Video annotation request.
type AnnotateVideoRequest struct {
	// Input video location. Currently, only
	// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
	// supported, which must be specified in the following format:
	// `gs://bucket-id/object-id` (other URI formats return
	// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
	// [Request URIs](/storage/docs/reference-uris).
	// A video URI may include wildcards in `object-id`, and thus identify
	// multiple videos. Supported wildcards: '*' to match 0 or more characters;
	// '?' to match 1 character. If unset, the input video should be embedded
	// in the request as `input_content`. If set, `input_content` should be unset.
	InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri" json:"input_uri,omitempty"`
	// The video data bytes. Encoding: base64. If unset, the input video(s)
	// should be specified via `input_uri`. If set, `input_uri` should be unset.
	InputContent []byte `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"`
	// Requested video annotation features.
	Features []Feature `protobuf:"varint,2,rep,packed,name=features,enum=google.cloud.videointelligence.v1beta2.Feature" json:"features,omitempty"`
	// Additional video context and/or feature-specific parameters.
	VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext" json:"video_context,omitempty"`
	// Optional location where the output (in JSON format) should be stored.
	// Currently, only [Google Cloud Storage](https://cloud.google.com/storage/)
	// URIs are supported, which must be specified in the following format:
	// `gs://bucket-id/object-id` (other URI formats return
	// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see
	// [Request URIs](/storage/docs/reference-uris).
	OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri" json:"output_uri,omitempty"`
	// Optional cloud region where annotation should take place. Supported cloud
	// regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region
	// is specified, a region will be determined based on video file location.
	LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId" json:"location_id,omitempty"`
}

func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} }
func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) }
func (*AnnotateVideoRequest) ProtoMessage() {}
func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }

func (m *AnnotateVideoRequest) GetInputUri() string {
	if m != nil {
		return m.InputUri
	}
	return ""
}

func (m *AnnotateVideoRequest) GetInputContent() []byte {
	if m != nil {
		return m.InputContent
	}
	return nil
}

func (m *AnnotateVideoRequest) GetFeatures() []Feature {
	if m != nil {
		return m.Features
	}
	return nil
}

func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext {
	if m != nil {
		return m.VideoContext
	}
	return nil
}

func (m *AnnotateVideoRequest) GetOutputUri() string {
	if m != nil {
		return m.OutputUri
	}
	return ""
}

func (m *AnnotateVideoRequest) GetLocationId() string {
	if m != nil {
		return m.LocationId
	}
	return ""
}

// Video context and/or feature-specific parameters.
type VideoContext struct {
	// Video segments to annotate. The segments may overlap and are not required
	// to be contiguous or span the whole video. If unspecified, each video
	// is treated as a single segment.
	Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments" json:"segments,omitempty"`
	// Config for LABEL_DETECTION.
	LabelDetectionConfig *LabelDetectionConfig `protobuf:"bytes,2,opt,name=label_detection_config,json=labelDetectionConfig" json:"label_detection_config,omitempty"`
	// Config for SHOT_CHANGE_DETECTION.
	ShotChangeDetectionConfig *ShotChangeDetectionConfig `protobuf:"bytes,3,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig" json:"shot_change_detection_config,omitempty"`
	// Config for EXPLICIT_CONTENT_DETECTION.
	ExplicitContentDetectionConfig *ExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig" json:"explicit_content_detection_config,omitempty"`
	// Config for FACE_DETECTION.
	FaceDetectionConfig *FaceDetectionConfig `protobuf:"bytes,5,opt,name=face_detection_config,json=faceDetectionConfig" json:"face_detection_config,omitempty"`
}

func (m *VideoContext) Reset() { *m = VideoContext{} }
func (m *VideoContext) String() string { return proto.CompactTextString(m) }
func (*VideoContext) ProtoMessage() {}
func (*VideoContext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }

func (m *VideoContext) GetSegments() []*VideoSegment {
	if m != nil {
		return m.Segments
	}
	return nil
}

func (m *VideoContext) GetLabelDetectionConfig() *LabelDetectionConfig {
	if m != nil {
		return m.LabelDetectionConfig
	}
	return nil
}

func (m *VideoContext) GetShotChangeDetectionConfig() *ShotChangeDetectionConfig {
	if m != nil {
		return m.ShotChangeDetectionConfig
	}
	return nil
}

func (m *VideoContext) GetExplicitContentDetectionConfig() *ExplicitContentDetectionConfig {
	if m != nil {
		return m.ExplicitContentDetectionConfig
	}
	return nil
}

func (m *VideoContext) GetFaceDetectionConfig() *FaceDetectionConfig {
	if m != nil {
		return m.FaceDetectionConfig
	}
	return nil
}

// Config for LABEL_DETECTION.
type LabelDetectionConfig struct {
	// What labels should be detected with LABEL_DETECTION, in addition to
	// video-level labels or segment-level labels.
	// If unspecified, defaults to `SHOT_MODE`.
	LabelDetectionMode LabelDetectionMode `protobuf:"varint,1,opt,name=label_detection_mode,json=labelDetectionMode,enum=google.cloud.videointelligence.v1beta2.LabelDetectionMode" json:"label_detection_mode,omitempty"`
	// Whether the video has been shot from a stationary (i.e. non-moving) camera.
	// When set to true, might improve detection accuracy for moving objects.
	// Should be used with `SHOT_AND_FRAME_MODE` enabled.
	StationaryCamera bool `protobuf:"varint,2,opt,name=stationary_camera,json=stationaryCamera" json:"stationary_camera,omitempty"`
	// Model to use for label detection.
	// Supported values: "builtin/stable" (the default if unset) and
	// "builtin/latest".
	Model string `protobuf:"bytes,3,opt,name=model" json:"model,omitempty"`
}

func (m *LabelDetectionConfig) Reset() { *m = LabelDetectionConfig{} }
func (m *LabelDetectionConfig) String() string { return proto.CompactTextString(m) }
func (*LabelDetectionConfig) ProtoMessage() {}
func (*LabelDetectionConfig) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }

func (m *LabelDetectionConfig) GetLabelDetectionMode() LabelDetectionMode {
	if m != nil {
		return m.LabelDetectionMode
	}
	return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED
}

func (m *LabelDetectionConfig) GetStationaryCamera() bool {
	if m != nil {
		return m.StationaryCamera
	}
	return false
}

func (m *LabelDetectionConfig) GetModel() string {
	if m != nil {
		return m.Model
	}
	return ""
}

// Config for SHOT_CHANGE_DETECTION.
type ShotChangeDetectionConfig struct {
	// Model to use for shot change detection.
	// Supported values: "builtin/stable" (the default if unset) and
	// "builtin/latest".
	Model string `protobuf:"bytes,1,opt,name=model" json:"model,omitempty"`
}

func (m *ShotChangeDetectionConfig) Reset() { *m = ShotChangeDetectionConfig{} }
func (m *ShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) }
func (*ShotChangeDetectionConfig) ProtoMessage() {}
func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }

func (m *ShotChangeDetectionConfig) GetModel() string {
	if m != nil {
		return m.Model
	}
	return ""
}

// Config for EXPLICIT_CONTENT_DETECTION.
type ExplicitContentDetectionConfig struct {
	// Model to use for explicit content detection.
	// Supported values: "builtin/stable" (the default if unset) and
	// "builtin/latest".
	Model string `protobuf:"bytes,1,opt,name=model" json:"model,omitempty"`
}

func (m *ExplicitContentDetectionConfig) Reset() { *m = ExplicitContentDetectionConfig{} }
func (m *ExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) }
func (*ExplicitContentDetectionConfig) ProtoMessage() {}
func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }

func (m *ExplicitContentDetectionConfig) GetModel() string {
	if m != nil {
		return m.Model
	}
	return ""
}

// Config for FACE_DETECTION.
type FaceDetectionConfig struct {
	// Model to use for face detection.
	// Supported values: "builtin/stable" (the default if unset) and
	// "builtin/latest".
	Model string `protobuf:"bytes,1,opt,name=model" json:"model,omitempty"`
	// Whether bounding boxes should be included in the face annotation output.
	IncludeBoundingBoxes bool `protobuf:"varint,2,opt,name=include_bounding_boxes,json=includeBoundingBoxes" json:"include_bounding_boxes,omitempty"`
}

func (m *FaceDetectionConfig) Reset() { *m = FaceDetectionConfig{} }
func (m *FaceDetectionConfig) String() string { return proto.CompactTextString(m) }
func (*FaceDetectionConfig) ProtoMessage() {}
func (*FaceDetectionConfig) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }

func (m *FaceDetectionConfig) GetModel() string {
	if m != nil {
		return m.Model
	}
	return ""
}

func (m *FaceDetectionConfig) GetIncludeBoundingBoxes() bool {
	if m != nil {
		return m.IncludeBoundingBoxes
	}
	return false
}

// Video segment.
type VideoSegment struct {
	// Time-offset, relative to the beginning of the video,
	// corresponding to the start of the segment (inclusive).
	StartTimeOffset *google_protobuf3.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset" json:"start_time_offset,omitempty"`
	// Time-offset, relative to the beginning of the video,
	// corresponding to the end of the segment (inclusive).
	EndTimeOffset *google_protobuf3.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset" json:"end_time_offset,omitempty"`
}

func (m *VideoSegment) Reset() { *m = VideoSegment{} }
func (m *VideoSegment) String() string { return proto.CompactTextString(m) }
func (*VideoSegment) ProtoMessage() {}
func (*VideoSegment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }

func (m *VideoSegment) GetStartTimeOffset() *google_protobuf3.Duration {
	if m != nil {
		return m.StartTimeOffset
	}
	return nil
}

func (m *VideoSegment) GetEndTimeOffset() *google_protobuf3.Duration {
	if m != nil {
		return m.EndTimeOffset
	}
	return nil
}

// Video segment level annotation results for label detection.
type LabelSegment struct {
	// Video segment where a label was detected.
	Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment" json:"segment,omitempty"`
	// Confidence that the label is accurate. Range: [0, 1].
	Confidence float32 `protobuf:"fixed32,2,opt,name=confidence" json:"confidence,omitempty"`
}

func (m *LabelSegment) Reset() { *m = LabelSegment{} }
func (m *LabelSegment) String() string { return proto.CompactTextString(m) }
func (*LabelSegment) ProtoMessage() {}
func (*LabelSegment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }

func (m *LabelSegment) GetSegment() *VideoSegment {
	if m != nil {
		return m.Segment
	}
	return nil
}

func (m *LabelSegment) GetConfidence() float32 {
	if m != nil {
		return m.Confidence
	}
	return 0
}

// Video frame level annotation results for label detection.
type LabelFrame struct {
	// Time-offset, relative to the beginning of the video, corresponding to the
	// video frame for this location.
	TimeOffset *google_protobuf3.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset" json:"time_offset,omitempty"`
	// Confidence that the label is accurate. Range: [0, 1].
	Confidence float32 `protobuf:"fixed32,2,opt,name=confidence" json:"confidence,omitempty"`
}

func (m *LabelFrame) Reset() { *m = LabelFrame{} }
func (m *LabelFrame) String() string { return proto.CompactTextString(m) }
func (*LabelFrame) ProtoMessage() {}
func (*LabelFrame) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }

func (m *LabelFrame) GetTimeOffset() *google_protobuf3.Duration {
	if m != nil {
		return m.TimeOffset
	}
	return nil
}

func (m *LabelFrame) GetConfidence() float32 {
	if m != nil {
		return m.Confidence
	}
	return 0
}

// Detected entity from video analysis.
type Entity struct {
	// Opaque entity ID. Some IDs may be available in
	// [Google Knowledge Graph Search
	// API](https://developers.google.com/knowledge-graph/).
	EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId" json:"entity_id,omitempty"`
	// Textual description, e.g. `Fixed-gear bicycle`.
	Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"`
	// Language code for `description` in BCP-47 format.
	LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode" json:"language_code,omitempty"`
}

func (m *Entity) Reset() { *m = Entity{} }
func (m *Entity) String() string { return proto.CompactTextString(m) }
func (*Entity) ProtoMessage() {}
func (*Entity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }

func (m *Entity) GetEntityId() string {
	if m != nil {
		return m.EntityId
	}
	return ""
}

func (m *Entity) GetDescription() string {
	if m != nil {
		return m.Description
	}
	return ""
}

func (m *Entity) GetLanguageCode() string {
	if m != nil {
		return m.LanguageCode
	}
	return ""
}

// Label annotation.
type LabelAnnotation struct {
	// Detected entity.
	Entity *Entity `protobuf:"bytes,1,opt,name=entity" json:"entity,omitempty"`
	// Common categories for the detected entity.
	// E.g. when the label is `Terrier` the category is likely `dog`. And in some
	// cases there might be more than one category, e.g. `Terrier` could also be
	// a `pet`.
	CategoryEntities []*Entity `protobuf:"bytes,2,rep,name=category_entities,json=categoryEntities" json:"category_entities,omitempty"`
	// All video segments where a label was detected.
	Segments []*LabelSegment `protobuf:"bytes,3,rep,name=segments" json:"segments,omitempty"`
	// All video frames where a label was detected.
	Frames []*LabelFrame `protobuf:"bytes,4,rep,name=frames" json:"frames,omitempty"`
}

func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} }
func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) }
func (*LabelAnnotation) ProtoMessage() {}
func (*LabelAnnotation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }

func (m *LabelAnnotation) GetEntity() *Entity {
	if m != nil {
		return m.Entity
	}
	return nil
}

func (m *LabelAnnotation) GetCategoryEntities() []*Entity {
	if m != nil {
		return m.CategoryEntities
	}
	return nil
}

func (m *LabelAnnotation) GetSegments() []*LabelSegment {
	if m != nil {
		return m.Segments
	}
	return nil
}

func (m *LabelAnnotation) GetFrames() []*LabelFrame {
	if m != nil {
		return m.Frames
	}
	return nil
}

// Video frame level annotation results for explicit content.
type ExplicitContentFrame struct {
	// Time-offset, relative to the beginning of the video, corresponding to the
	// video frame for this location.
	TimeOffset *google_protobuf3.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset" json:"time_offset,omitempty"`
	// Likelihood of the pornography content.
	PornographyLikelihood Likelihood `protobuf:"varint,2,opt,name=pornography_likelihood,json=pornographyLikelihood,enum=google.cloud.videointelligence.v1beta2.Likelihood" json:"pornography_likelihood,omitempty"`
}

func (m *ExplicitContentFrame) Reset() { *m = ExplicitContentFrame{} }
func (m *ExplicitContentFrame) String() string { return proto.CompactTextString(m) }
func (*ExplicitContentFrame) ProtoMessage() {}
func (*ExplicitContentFrame) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} }

func (m *ExplicitContentFrame) GetTimeOffset() *google_protobuf3.Duration {
	if m != nil {
		return m.TimeOffset
	}
	return nil
}

func (m *ExplicitContentFrame) GetPornographyLikelihood() Likelihood {
	if m != nil {
		return m.PornographyLikelihood
	}
	return Likelihood_LIKELIHOOD_UNSPECIFIED
}

// Explicit content annotation (based on per-frame visual signals only).
// If no explicit content has been detected in a frame, no annotations are
// present for that frame.
type ExplicitContentAnnotation struct {
	// All video frames where explicit content was detected.
	Frames []*ExplicitContentFrame `protobuf:"bytes,1,rep,name=frames" json:"frames,omitempty"`
}

func (m *ExplicitContentAnnotation) Reset() { *m = ExplicitContentAnnotation{} }
func (m *ExplicitContentAnnotation) String() string { return proto.CompactTextString(m) }
func (*ExplicitContentAnnotation) ProtoMessage() {}
func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }

func (m *ExplicitContentAnnotation) GetFrames() []*ExplicitContentFrame {
	if m != nil {
		return m.Frames
	}
	return nil
}

// Normalized bounding box.
// The normalized vertex coordinates are relative to the original image.
// Range: [0, 1].
type NormalizedBoundingBox struct {
	// Left X coordinate.
	Left float32 `protobuf:"fixed32,1,opt,name=left" json:"left,omitempty"`
	// Top Y coordinate.
	Top float32 `protobuf:"fixed32,2,opt,name=top" json:"top,omitempty"`
	// Right X coordinate.
	Right float32 `protobuf:"fixed32,3,opt,name=right" json:"right,omitempty"`
	// Bottom Y coordinate.
	Bottom float32 `protobuf:"fixed32,4,opt,name=bottom" json:"bottom,omitempty"`
}

func (m *NormalizedBoundingBox) Reset() { *m = NormalizedBoundingBox{} }
func (m *NormalizedBoundingBox) String() string { return proto.CompactTextString(m) }
func (*NormalizedBoundingBox) ProtoMessage() {}
func (*NormalizedBoundingBox) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }

func (m *NormalizedBoundingBox) GetLeft() float32 {
	if m != nil {
		return m.Left
	}
	return 0
}

func (m *NormalizedBoundingBox) GetTop() float32 {
	if m != nil {
		return m.Top
	}
	return 0
}

func (m *NormalizedBoundingBox) GetRight() float32 {
	if m != nil {
		return m.Right
	}
	return 0
}

func (m *NormalizedBoundingBox) GetBottom() float32 {
	if m != nil {
		return m.Bottom
	}
	return 0
}

// Video segment level annotation results for face detection.
type FaceSegment struct {
	// Video segment where a face was detected.
	Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment" json:"segment,omitempty"`
}

func (m *FaceSegment) Reset() { *m = FaceSegment{} }
func (m *FaceSegment) String() string { return proto.CompactTextString(m) }
func (*FaceSegment) ProtoMessage() {}
func (*FaceSegment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }

func (m *FaceSegment) GetSegment() *VideoSegment {
	if m != nil {
		return m.Segment
	}
	return nil
}

// Video frame level annotation results for face detection.
type FaceFrame struct {
	// Normalized Bounding boxes in a frame.
	// There can be more than one box if the same face is detected in multiple
	// locations within the current frame.
	NormalizedBoundingBoxes []*NormalizedBoundingBox `protobuf:"bytes,1,rep,name=normalized_bounding_boxes,json=normalizedBoundingBoxes" json:"normalized_bounding_boxes,omitempty"`
	// Time-offset, relative to the beginning of the video,
	// corresponding to the video frame for this location.
	TimeOffset *google_protobuf3.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset" json:"time_offset,omitempty"`
}

func (m *FaceFrame) Reset() { *m = FaceFrame{} }
func (m *FaceFrame) String() string { return proto.CompactTextString(m) }
func (*FaceFrame) ProtoMessage() {}
func (*FaceFrame) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }

func (m *FaceFrame) GetNormalizedBoundingBoxes() []*NormalizedBoundingBox {
	if m != nil {
		return m.NormalizedBoundingBoxes
	}
	return nil
}

func (m *FaceFrame) GetTimeOffset() *google_protobuf3.Duration {
	if m != nil {
		return m.TimeOffset
	}
	return nil
}

// Face annotation.
type FaceAnnotation struct {
	// Thumbnail of a representative face view (in JPEG format). Encoding: base64.
	Thumbnail []byte `protobuf:"bytes,1,opt,name=thumbnail,proto3" json:"thumbnail,omitempty"`
	// All video segments where a face was detected.
	Segments []*FaceSegment `protobuf:"bytes,2,rep,name=segments" json:"segments,omitempty"`
	// All video frames where a face was detected.
	Frames []*FaceFrame `protobuf:"bytes,3,rep,name=frames" json:"frames,omitempty"`
}

func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} }
func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) }
func (*FaceAnnotation) ProtoMessage() {}
func (*FaceAnnotation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }

func (m *FaceAnnotation) GetThumbnail() []byte {
	if m != nil {
		return m.Thumbnail
	}
	return nil
}

func (m *FaceAnnotation) GetSegments() []*FaceSegment {
	if m != nil {
		return m.Segments
	}
	return nil
}

func (m *FaceAnnotation) GetFrames() []*FaceFrame {
	if m != nil {
		return m.Frames
	}
	return nil
}

// Annotation results for a single video.
type VideoAnnotationResults struct {
	// Video file location in
	// [Google Cloud Storage](https://cloud.google.com/storage/).
	InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri" json:"input_uri,omitempty"`
	// Label annotations on video level or user specified segment level.
	// There is exactly one element for each unique label.
	SegmentLabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=segment_label_annotations,json=segmentLabelAnnotations" json:"segment_label_annotations,omitempty"`
	// Label annotations on shot level.
	// There is exactly one element for each unique label.
	ShotLabelAnnotations []*LabelAnnotation `protobuf:"bytes,3,rep,name=shot_label_annotations,json=shotLabelAnnotations" json:"shot_label_annotations,omitempty"`
	// Label annotations on frame level.
	// There is exactly one element for each unique label.
	FrameLabelAnnotations []*LabelAnnotation `protobuf:"bytes,4,rep,name=frame_label_annotations,json=frameLabelAnnotations" json:"frame_label_annotations,omitempty"`
	// Face annotations. There is exactly one element for each unique face.
	FaceAnnotations []*FaceAnnotation `protobuf:"bytes,5,rep,name=face_annotations,json=faceAnnotations" json:"face_annotations,omitempty"`
	// Shot annotations. Each shot is represented as a video segment.
	ShotAnnotations []*VideoSegment `protobuf:"bytes,6,rep,name=shot_annotations,json=shotAnnotations" json:"shot_annotations,omitempty"`
	// Explicit content annotation.
	ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,7,opt,name=explicit_annotation,json=explicitAnnotation" json:"explicit_annotation,omitempty"`
	// If set, indicates an error. Note that for a single `AnnotateVideoRequest`
	// some videos may succeed and some may fail.
	Error *google_rpc.Status `protobuf:"bytes,9,opt,name=error" json:"error,omitempty"`
}

func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} }
func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) }
func (*VideoAnnotationResults) ProtoMessage() {}
func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} }

func (m *VideoAnnotationResults) GetInputUri() string {
	if m != nil {
		return m.InputUri
	}
	return ""
}

func (m *VideoAnnotationResults) GetSegmentLabelAnnotations() []*LabelAnnotation {
	if m != nil {
		return m.SegmentLabelAnnotations
	}
	return nil
}

func (m *VideoAnnotationResults) GetShotLabelAnnotations() []*LabelAnnotation {
	if m != nil {
		return m.ShotLabelAnnotations
	}
	return nil
}

func (m *VideoAnnotationResults) GetFrameLabelAnnotations() []*LabelAnnotation {
	if m != nil {
		return m.FrameLabelAnnotations
	}
	return nil
}

func (m *VideoAnnotationResults) GetFaceAnnotations() []*FaceAnnotation {
	if m != nil {
		return m.FaceAnnotations
	}
	return nil
}

func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment {
	if m != nil {
		return m.ShotAnnotations
	}
	return nil
}

func (m *VideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation {
	if m != nil {
		return m.ExplicitAnnotation
	}
	return nil
}

func (m *VideoAnnotationResults) GetError() *google_rpc.Status {
	if m != nil {
		return m.Error
	}
	return nil
}

// Video annotation response. Included in the `response`
// field of the `Operation` returned by the `GetOperation`
// call of the `google::longrunning::Operations` service.
type AnnotateVideoResponse struct {
	// Annotation results for all videos specified in `AnnotateVideoRequest`.
	AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults" json:"annotation_results,omitempty"`
}

func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} }
func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) }
func (*AnnotateVideoResponse) ProtoMessage() {}
func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} }

func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults {
	if m != nil {
		return m.AnnotationResults
	}
	return nil
}

// Annotation progress for a single video.
type VideoAnnotationProgress struct {
	// Video file location in
	// [Google Cloud Storage](https://cloud.google.com/storage/).
	InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri" json:"input_uri,omitempty"`
	// Approximate percentage processed thus far.
	// Guaranteed to be 100 when fully processed.
	ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent" json:"progress_percent,omitempty"`
	// Time when the request was received.
	StartTime *google_protobuf4.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime" json:"start_time,omitempty"`
	// Time of the most recent update.
	UpdateTime *google_protobuf4.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime" json:"update_time,omitempty"`
}

func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} }
func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) }
func (*VideoAnnotationProgress) ProtoMessage() {}
func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} }

func (m *VideoAnnotationProgress) GetInputUri() string {
	if m != nil {
		return m.InputUri
	}
	return ""
}

func (m *VideoAnnotationProgress) GetProgressPercent() int32 {
	if m != nil {
		return m.ProgressPercent
	}
	return 0
}

func (m *VideoAnnotationProgress) GetStartTime() *google_protobuf4.Timestamp {
	if m != nil {
		return m.StartTime
	}
	return nil
}

func (m *VideoAnnotationProgress) GetUpdateTime() *google_protobuf4.Timestamp {
	if m != nil {
		return m.UpdateTime
	}
	return nil
}

// Video annotation progress. Included in the `metadata`
// field of the `Operation` returned by the `GetOperation`
// call of the `google::longrunning::Operations` service.
type AnnotateVideoProgress struct {
	// Progress metadata for all videos specified in `AnnotateVideoRequest`.
	AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress" json:"annotation_progress,omitempty"`
}

func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} }
func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) }
func (*AnnotateVideoProgress) ProtoMessage() {}
func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} }

func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress {
	if m != nil {
		return m.AnnotationProgress
	}
	return nil
}

func init() {
	proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1beta2.AnnotateVideoRequest")
	proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1beta2.VideoContext")
	proto.RegisterType((*LabelDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.LabelDetectionConfig")
	proto.RegisterType((*ShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.ShotChangeDetectionConfig")
	proto.RegisterType((*ExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.ExplicitContentDetectionConfig")
	proto.RegisterType((*FaceDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.FaceDetectionConfig")
	proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1beta2.VideoSegment")
	proto.RegisterType((*LabelSegment)(nil), "google.cloud.videointelligence.v1beta2.LabelSegment")
	proto.RegisterType((*LabelFrame)(nil), "google.cloud.videointelligence.v1beta2.LabelFrame")
	proto.RegisterType((*Entity)(nil), "google.cloud.videointelligence.v1beta2.Entity")
	proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1beta2.LabelAnnotation")
	proto.RegisterType((*ExplicitContentFrame)(nil), "google.cloud.videointelligence.v1beta2.ExplicitContentFrame")
	proto.RegisterType((*ExplicitContentAnnotation)(nil), "google.cloud.videointelligence.v1beta2.ExplicitContentAnnotation")
	proto.RegisterType((*NormalizedBoundingBox)(nil), "google.cloud.videointelligence.v1beta2.NormalizedBoundingBox")
	proto.RegisterType((*FaceSegment)(nil), "google.cloud.videointelligence.v1beta2.FaceSegment")
	proto.RegisterType((*FaceFrame)(nil), "google.cloud.videointelligence.v1beta2.FaceFrame")
	proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.videointelligence.v1beta2.FaceAnnotation")
	proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1beta2.VideoAnnotationResults")
	proto.RegisterType((*AnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1beta2.AnnotateVideoResponse")
	proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1beta2.VideoAnnotationProgress")
	proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1beta2.AnnotateVideoProgress")
	proto.RegisterEnum("google.cloud.videointelligence.v1beta2.Feature", Feature_name, Feature_value)
	proto.RegisterEnum("google.cloud.videointelligence.v1beta2.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value)
	proto.RegisterEnum("google.cloud.videointelligence.v1beta2.Likelihood", Likelihood_name, Likelihood_value)
}

// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// Client API for VideoIntelligenceService service

type VideoIntelligenceServiceClient interface {
	// Performs asynchronous video annotation. Progress and results can be
	// retrieved through the `google.longrunning.Operations` interface.
	// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
	// `Operation.response` contains `AnnotateVideoResponse` (results).
	AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error)
}

type videoIntelligenceServiceClient struct {
	cc *grpc.ClientConn
}

func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient {
	return &videoIntelligenceServiceClient{cc}
}

func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*google_longrunning.Operation, error) {
	out := new(google_longrunning.Operation)
	err := grpc.Invoke(ctx, "/google.cloud.videointelligence.v1beta2.VideoIntelligenceService/AnnotateVideo", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Server API for VideoIntelligenceService service

type VideoIntelligenceServiceServer interface {
	// Performs asynchronous video annotation. Progress and results can be
	// retrieved through the `google.longrunning.Operations` interface.
	// `Operation.metadata` contains `AnnotateVideoProgress` (progress).
	// `Operation.response` contains `AnnotateVideoResponse` (results).
AnnotateVideo(context.Context, *AnnotateVideoRequest) (*google_longrunning.Operation, error) } func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) } func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(AnnotateVideoRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.videointelligence.v1beta2.VideoIntelligenceService/AnnotateVideo", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) } return interceptor(ctx, in, info, handler) } var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ ServiceName: "google.cloud.videointelligence.v1beta2.VideoIntelligenceService", HandlerType: (*VideoIntelligenceServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "AnnotateVideo", Handler: _VideoIntelligenceService_AnnotateVideo_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "google/cloud/videointelligence/v1beta2/video_intelligence.proto", } func init() { proto.RegisterFile("google/cloud/videointelligence/v1beta2/video_intelligence.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ // 1702 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0x4f, 0x6f, 0xdb, 0xc8, 0x15, 0x2f, 0x25, 0x59, 0xb1, 0x9e, 0x64, 0x4b, 0x19, 0xcb, 0xb6, 0xec, 0x26, 0x5e, 0x97, 0x29, 0x16, 0xae, 0x0b, 0x48, 0x88, 0x77, 0xb1, 0x45, 0x93, 0x6d, 0x17, 0xb2, 0x4c, 0x6d, 0xd4, 0x75, 0x24, 0x81, 0x52, 0xd2, 0xa6, 0x45, 0x41, 0x50, 0xe4, 0x88, 0x22, 0x96, 0xe2, 0x70, 0xc9, 0x61, 0x10, 0xf7, 0xd0, 0xc3, 0x1e, 0x16, 0xe8, 0xb1, 0xe8, 0xa5, 0x9f, 0xa1, 0x87, 0x7e, 0x83, 0x02, 0x45, 0x2f, 0x05, 0x7a, 0xe9, 0xa1, 0xbd, 0xf4, 0xd2, 0x53, 0x8f, 0xfd, 0x10, 0x05, 0x67, 0x86, 0x12, 0x45, 0xc9, 0xb1, 0x94, 0xa0, 0x37, 0xce, 0x7b, 0xf3, 0x7e, 0xef, 0xff, 0x9b, 0x19, 0xc2, 0x67, 0x16, 0x21, 0x96, 0x83, 0x1b, 0x86, 0x43, 0x42, 0xb3, 0xf1, 0xda, 0x36, 0x31, 0xb1, 0x5d, 0x8a, 0x1d, 0xc7, 0xb6, 0xb0, 0x6b, 0xe0, 0xc6, 0xeb, 0xc7, 0x23, 0x4c, 0xf5, 0x0b, 0xce, 0xd1, 0x92, 0xac, 0xba, 0xe7, 0x13, 0x4a, 0xd0, 0x87, 0x1c, 0xa0, 0xce, 0x00, 0xea, 0x4b, 0x00, 0x75, 0x01, 0x70, 0xfc, 0x40, 0x28, 0xd2, 0x3d, 0xbb, 0xa1, 0xbb, 0x2e, 0xa1, 0x3a, 0xb5, 0x89, 0x1b, 0x70, 0x94, 0xe3, 0x47, 0x82, 0xeb, 0x10, 0xd7, 0xf2, 0x43, 0xd7, 0xb5, 0x5d, 0xab, 0x41, 0x3c, 0xec, 0x2f, 0x6c, 0x3a, 0x11, 0x9b, 0xd8, 0x6a, 0x14, 0x8e, 0x1b, 0x66, 0xc8, 0x37, 0x08, 0xfe, 0x07, 0x69, 0x3e, 0xb5, 0xa7, 0x38, 0xa0, 0xfa, 0xd4, 0x13, 0x1b, 0x0e, 0xc5, 0x06, 0xdf, 0x33, 0x1a, 0x01, 0xd5, 0x69, 0x28, 0x90, 0xe5, 0x3f, 0x67, 0xa0, 0xda, 0xe4, 0x46, 0xe1, 0x97, 0x91, 0x0b, 0x2a, 0xfe, 0x2a, 0xc4, 0x01, 0x45, 0xdf, 0x86, 0x82, 0xed, 0x7a, 0x21, 0xd5, 0x42, 0xdf, 0xae, 0x49, 0xa7, 0xd2, 0x59, 0x41, 0xdd, 0x66, 0x84, 0x17, 0xbe, 0x8d, 0x1e, 0xc1, 0x0e, 0x67, 0x1a, 0xc4, 0xa5, 0xd8, 0xa5, 0xb5, 0xfc, 0xa9, 0x74, 0x56, 0x52, 0x4b, 0x8c, 0xd8, 0xe2, 0x34, 0xf4, 0x05, 0x6c, 0x8f, 0xb1, 0x4e, 0x43, 0x1f, 0x07, 0xb5, 0xcc, 0x69, 0xf6, 0x6c, 0xf7, 0xa2, 0x51, 0x5f, 0x2f, 0x64, 0xf5, 0x36, 0x97, 0x53, 0x67, 0x00, 0xe8, 0x15, 0xec, 0xf0, 0x44, 0x30, 0x8d, 0x6f, 0x68, 0x2d, 0x7b, 0x2a, 
0x9d, 0x15, 0x2f, 0x3e, 0x5e, 0x17, 0x91, 0xf9, 0xd6, 0xe2, 0xb2, 0x6a, 0xe9, 0x75, 0x62, 0x85, 0x1e, 0x02, 0x90, 0x90, 0xc6, 0xae, 0xe6, 0x98, 0xab, 0x05, 0x4e, 0x89, 0x7c, 0xfd, 0x00, 0x8a, 0x0e, 0x31, 0x58, 0xb4, 0x35, 0xdb, 0xac, 0x6d, 0x31, 0x3e, 0xc4, 0xa4, 0x8e, 0x29, 0xff, 0x3b, 0x07, 0xa5, 0x24, 0x3c, 0xea, 0xc3, 0x76, 0x80, 0xad, 0x29, 0x76, 0x69, 0x50, 0x93, 0x4e, 0xb3, 0x1b, 0x9b, 0x39, 0xe0, 0xc2, 0xea, 0x0c, 0x05, 0xf9, 0x70, 0xe0, 0xe8, 0x23, 0xec, 0x68, 0x26, 0xa6, 0xd8, 0x60, 0xa6, 0x18, 0xc4, 0x1d, 0xdb, 0x56, 0x2d, 0xc3, 0xc2, 0xf0, 0xe9, 0xba, 0xf8, 0xd7, 0x11, 0xca, 0x55, 0x0c, 0xd2, 0x62, 0x18, 0x6a, 0xd5, 0x59, 0x41, 0x45, 0x5f, 0x4b, 0xf0, 0x20, 0x98, 0x10, 0xaa, 0x19, 0x13, 0xdd, 0xb5, 0xf0, 0xb2, 0x6a, 0x9e, 0x81, 0xe6, 0xba, 0xaa, 0x07, 0x13, 0x42, 0x5b, 0x0c, 0x2a, 0xad, 0xff, 0x28, 0xb8, 0x8d, 0x85, 0x7e, 0x2b, 0xc1, 0x77, 0xf0, 0x1b, 0xcf, 0xb1, 0x0d, 0x7b, 0x56, 0x6c, 0xcb, 0x96, 0xe4, 0x98, 0x25, 0xed, 0x75, 0x2d, 0x51, 0x04, 0xa0, 0x28, 0xd4, 0xb4, 0x39, 0x27, 0xf8, 0xad, 0x7c, 0x44, 0x60, 0x7f, 0xac, 0x1b, 0x2b, 0x02, 0xb2, 0xc5, 0xcc, 0x78, 0xba, 0x76, 0x91, 0xeb, 0xc6, 0x52, 0x28, 0xf6, 0xc6, 0xcb, 0x44, 0xf9, 0xaf, 0x12, 0x54, 0x57, 0x25, 0x0e, 0x39, 0x50, 0x4d, 0x97, 0xc5, 0x94, 0x98, 0x98, 0xb5, 0xeb, 0xee, 0xc5, 0x93, 0x77, 0x2b, 0x8a, 0xe7, 0xc4, 0xc4, 0x2a, 0x72, 0x96, 0x68, 0xe8, 0xfb, 0x70, 0x3f, 0xe0, 0xb3, 0x4b, 0xf7, 0x6f, 0x34, 0x43, 0x9f, 0x62, 0x5f, 0x67, 0xf5, 0xb7, 0xad, 0x56, 0xe6, 0x8c, 0x16, 0xa3, 0xa3, 0x2a, 0x6c, 0x45, 0xa6, 0x38, 0xac, 0x4a, 0x0a, 0x2a, 0x5f, 0xc8, 0x8f, 0xe1, 0xe8, 0xd6, 0x32, 0x98, 0x8b, 0x48, 0x49, 0x91, 0x4f, 0xe0, 0xe4, 0xed, 0xf9, 0xba, 0x45, 0x4e, 0x87, 0xbd, 0x15, 0x01, 0x5e, 0xbd, 0x19, 0x7d, 0x0c, 0x07, 0xb6, 0x6b, 0x38, 0xa1, 0x89, 0xb5, 0x11, 0x09, 0x5d, 0xd3, 0x76, 0x2d, 0x6d, 0x44, 0xde, 0xb0, 0xc1, 0x15, 0xf9, 0x57, 0x15, 0xdc, 0x4b, 0xc1, 0xbc, 0x8c, 0x78, 0xf2, 0xef, 0x25, 0xd1, 0xf8, 0xa2, 0x61, 0x91, 0xc2, 0x22, 0xe4, 0x53, 0x2d, 0x1a, 0xbf, 0x1a, 0x19, 0x8f, 0x03, 0x4c, 0x99, 0xa2, 0xe2, 0xc5, 0x51, 0x9c, 0x8c, 0x78, 0x44, 0xd7, 0xaf, 0xc4, 0x08, 0x57, 0xcb, 0x4c, 0x66, 0x68, 0x4f, 0x71, 0x8f, 0x49, 0xa0, 0x26, 0x94, 0xb1, 0x6b, 0x2e, 0x80, 0x64, 0xee, 0x02, 0xd9, 0xc1, 0xae, 0x39, 0x87, 0x90, 0x7f, 0x0d, 0x25, 0x96, 0xd5, 0xd8, 0xb2, 0x2e, 0xdc, 0x13, 0xc3, 0x44, 0xd8, 0xf3, 0x6e, 0x13, 0x29, 0x06, 0x41, 0x27, 0x00, 0xac, 0xe8, 0xcd, 0x68, 0x2f, 0xb3, 0x2e, 0xa3, 0x26, 0x28, 0xf2, 0x04, 0x80, 0xe9, 0x6f, 0xfb, 0xfa, 0x14, 0xa3, 0x27, 0x50, 0xdc, 0x28, 0x22, 0x40, 0xe7, 0xc1, 0xb8, 0x4b, 0x93, 0x03, 0x79, 0xc5, 0xa5, 0x36, 0xbd, 0x89, 0x4e, 0x2c, 0xcc, 0xbe, 0xa2, 0x31, 0x2d, 0x4e, 0x2c, 0x4e, 0xe8, 0x98, 0xe8, 0x14, 0x8a, 0x26, 0x0e, 0x0c, 0xdf, 0xf6, 0x22, 0x0d, 0x0c, 0xa7, 0xa0, 0x26, 0x49, 0xd1, 0x99, 0xe6, 0xe8, 0xae, 0x15, 0xea, 0x16, 0xd6, 0x8c, 0xa8, 0x8b, 0x78, 0xe5, 0x96, 0x62, 0x62, 0x8b, 0x98, 0x58, 0xfe, 0x67, 0x06, 0xca, 0xcc, 0xb1, 0xe6, 0xec, 0x20, 0x47, 0x6d, 0xc8, 0x73, 0x35, 0xc2, 0xb1, 0xfa, 0xda, 0x73, 0x88, 0x49, 0xa9, 0x42, 0x1a, 0xfd, 0x02, 0xee, 0x1b, 0x3a, 0xc5, 0x16, 0xf1, 0x6f, 0x34, 0x46, 0xb2, 0xc5, 0xc1, 0xb9, 0x39, 0x64, 0x25, 0x06, 0x52, 0x04, 0xce, 0xc2, 0x99, 0x94, 0xdd, 0xec, 0x4c, 0x4a, 0x16, 0x52, 0xe2, 0x4c, 0xfa, 0x09, 0xe4, 0xc7, 0x51, 0x76, 0x83, 0x5a, 0x8e, 0xe1, 0x5d, 0x6c, 0x84, 0xc7, 0x0a, 0x43, 0x15, 0x08, 0xf2, 0x9f, 0x24, 0xa8, 0xa6, 0xba, 0xfc, 0xfd, 0x2b, 0xc7, 0x86, 0x03, 0x8f, 0xf8, 0x2e, 0xb1, 0x7c, 0xdd, 0x9b, 0xdc, 0x68, 0x8e, 0xfd, 0x25, 0x76, 0xec, 0x09, 0x21, 0x26, 0xcb, 0xfe, 0xee, 0x06, 0x06, 0xcf, 0x24, 0xd5, 0xfd, 0x04, 0xe2, 0x9c, 
0x2c, 0x7f, 0x05, 0x47, 0x29, 0xf3, 0x13, 0xf5, 0x31, 0x9c, 0x05, 0x8a, 0x5f, 0x06, 0x3e, 0x7d, 0xc7, 0x73, 0x6a, 0x31, 0x64, 0x5f, 0xc2, 0x7e, 0x97, 0xf8, 0x53, 0xdd, 0xb1, 0x7f, 0x85, 0xcd, 0xc4, 0x5c, 0x42, 0x08, 0x72, 0x0e, 0x1e, 0xf3, 0x58, 0x65, 0x54, 0xf6, 0x8d, 0x2a, 0x90, 0xa5, 0xc4, 0x13, 0xdd, 0x13, 0x7d, 0x46, 0x73, 0xd0, 0xb7, 0xad, 0x09, 0xbf, 0x47, 0x65, 0x54, 0xbe, 0x40, 0x07, 0x90, 0x1f, 0x11, 0x4a, 0xc9, 0x94, 0x1d, 0xa9, 0x19, 0x55, 0xac, 0xe4, 0x5f, 0x42, 0x31, 0x1a, 0xa6, 0xff, 0xa7, 0x69, 0x22, 0xff, 0x45, 0x82, 0x42, 0x84, 0xcf, 0x73, 0x7e, 0x03, 0x47, 0xee, 0xcc, 0xb3, 0xf4, 0x3c, 0xe6, 0x21, 0xfc, 0xd1, 0xba, 0xfa, 0x56, 0x86, 0x48, 0x3d, 0x74, 0x57, 0x91, 0x71, 0x90, 0x2e, 0xb7, 0xcc, 0x06, 0xe5, 0x26, 0xff, 0x4d, 0x82, 0xdd, 0xc8, 0x89, 0x44, 0xe6, 0x1f, 0x40, 0x81, 0x4e, 0xc2, 0xe9, 0xc8, 0xd5, 0x6d, 0x7e, 0xe0, 0x94, 0xd4, 0x39, 0x01, 0xf5, 0x12, 0x2d, 0xc9, 0xdb, 0xfc, 0xa3, 0x4d, 0xae, 0x0e, 0xcb, 0x1d, 0xd9, 0x99, 0x15, 0x1a, 0xef, 0xf0, 0xc7, 0x9b, 0xc0, 0x2d, 0x56, 0xd7, 0x7f, 0xb7, 0xe0, 0x80, 0xe5, 0x6a, 0xee, 0x8d, 0x8a, 0x83, 0xd0, 0xa1, 0xc1, 0xdb, 0x1f, 0x06, 0x01, 0x1c, 0x09, 0x73, 0x34, 0x7e, 0x33, 0x49, 0x3c, 0x78, 0x84, 0x93, 0x3f, 0xd8, 0x68, 0x4e, 0x24, 0xf4, 0x1f, 0x0a, 0xe4, 0x14, 0x3d, 0x40, 0x53, 0x38, 0x60, 0x17, 0xd5, 0x65, 0x8d, 0xd9, 0xf7, 0xd3, 0x58, 0x8d, 0x60, 0x97, 0xd4, 0x11, 0x38, 0x64, 0x51, 0x5a, 0xa1, 0x2f, 0xf7, 0x7e, 0xfa, 0xf6, 0x19, 0xee, 0x92, 0x42, 0x1d, 0x2a, 0xec, 0xc2, 0x99, 0xd4, 0xb4, 0xc5, 0x34, 0x7d, 0xb2, 0x49, 0x86, 0x13, 0x8a, 0xca, 0xe3, 0x85, 0x75, 0x80, 0x34, 0xa8, 0xb0, 0x10, 0x26, 0x55, 0xe4, 0xdf, 0xe3, 0xe9, 0x52, 0x8e, 0xd0, 0x92, 0x0a, 0x7c, 0xd8, 0x9b, 0xdd, 0xe3, 0xe7, 0x4a, 0x6a, 0xf7, 0x36, 0x7b, 0x43, 0xdc, 0x3a, 0x64, 0x55, 0x14, 0xa3, 0x27, 0xda, 0xef, 0x0c, 0xb6, 0xb0, 0xef, 0x13, 0xbf, 0x56, 0x60, 0x5a, 0x50, 0xac, 0xc5, 0xf7, 0x8c, 0xfa, 0x80, 0x3d, 0x82, 0x55, 0xbe, 0x41, 0xfe, 0x46, 0x82, 0xfd, 0xd4, 0x2b, 0x38, 0xf0, 0x88, 0x1b, 0x60, 0x34, 0x05, 0x34, 0x37, 0x57, 0xf3, 0x79, 0x0f, 0x88, 0x29, 0xf4, 0xe3, 0x8d, 0x42, 0xb3, 0xd4, 0x49, 0xea, 0x7d, 0x3d, 0x4d, 0x92, 0xff, 0x25, 0xc1, 0x61, 0x6a, 0x77, 0xdf, 0x27, 0x96, 0x8f, 0x83, 0x3b, 0x1a, 0xef, 0x7b, 0x50, 0xf1, 0xc4, 0x46, 0xcd, 0xc3, 0xbe, 0x11, 0xcd, 0xe6, 0x68, 0x7c, 0x6d, 0xa9, 0xe5, 0x98, 0xde, 0xe7, 0x64, 0xf4, 0x43, 0x80, 0xf9, 0x2d, 0x55, 0xbc, 0xe2, 0x8e, 0x97, 0x66, 0xdc, 0x30, 0xfe, 0x83, 0xa0, 0x16, 0x66, 0xf7, 0x53, 0xf4, 0x14, 0x8a, 0xa1, 0x67, 0xea, 0x14, 0x73, 0xd9, 0xdc, 0x9d, 0xb2, 0xc0, 0xb7, 0x47, 0x04, 0xf9, 0x37, 0xe9, 0x20, 0xcf, 0x3c, 0xf3, 0x60, 0x2f, 0x11, 0xe4, 0xd8, 0x5e, 0x11, 0xe5, 0xcf, 0xde, 0x31, 0xca, 0x31, 0xba, 0x9a, 0x48, 0x60, 0x4c, 0x3b, 0xff, 0x46, 0x82, 0x7b, 0xe2, 0x27, 0x03, 0x3a, 0x84, 0xbd, 0xb6, 0xd2, 0x1c, 0xbe, 0x50, 0x15, 0xed, 0x45, 0x77, 0xd0, 0x57, 0x5a, 0x9d, 0x76, 0x47, 0xb9, 0xaa, 0x7c, 0x0b, 0xed, 0x41, 0xf9, 0xba, 0x79, 0xa9, 0x5c, 0x6b, 0x57, 0xca, 0x50, 0x69, 0x0d, 0x3b, 0xbd, 0x6e, 0x45, 0x42, 0x47, 0xb0, 0x3f, 0x78, 0xd6, 0x1b, 0x6a, 0xad, 0x67, 0xcd, 0xee, 0xe7, 0x4a, 0x82, 0x95, 0x41, 0x27, 0x70, 0xac, 0xfc, 0xac, 0x7f, 0xdd, 0x69, 0x75, 0x86, 0x5a, 0xab, 0xd7, 0x1d, 0x2a, 0xdd, 0x61, 0x82, 0x9f, 0x45, 0x08, 0x76, 0xdb, 0xcd, 0x56, 0x52, 0x26, 0x77, 0xee, 0x03, 0x5a, 0x7e, 0x7e, 0xa1, 0xef, 0xc2, 0x69, 0x4a, 0xb3, 0xf6, 0xbc, 0x77, 0x95, 0xb6, 0x6f, 0x07, 0x0a, 0xcc, 0x94, 0x88, 0x55, 0x91, 0xd0, 0x2e, 0x40, 0x5b, 0x6d, 0x3e, 0x57, 0xf8, 0x3a, 0x13, 0xf9, 0xc5, 0xd8, 0xcd, 0xee, 0x95, 0x96, 0x60, 0x64, 0xcf, 0x29, 0xc0, 0xfc, 0xee, 0x82, 0x8e, 0xe1, 0xe0, 0xba, 0xf3, 0x85, 
0x72, 0xdd, 0x79, 0xd6, 0xeb, 0x5d, 0xa5, 0x34, 0xdc, 0x87, 0x9d, 0x97, 0x8a, 0xfa, 0x4a, 0x7b, 0xd1, 0x65, 0x5b, 0x5e, 0x55, 0x24, 0x54, 0x82, 0xed, 0xd9, 0x2a, 0x13, 0xad, 0xfa, 0xbd, 0xc1, 0xa0, 0x73, 0x79, 0xad, 0x54, 0xb2, 0x08, 0x20, 0x2f, 0x38, 0x39, 0x54, 0x86, 0x22, 0x13, 0x15, 0x84, 0xad, 0x8b, 0x3f, 0x4a, 0x50, 0x63, 0x29, 0xea, 0x24, 0x92, 0x37, 0xc0, 0xfe, 0x6b, 0xdb, 0xc0, 0xd1, 0x3b, 0x7f, 0x67, 0xa1, 0x36, 0xd0, 0xda, 0xb7, 0xa4, 0x55, 0x7f, 0xaf, 0x8e, 0x1f, 0xc6, 0xd2, 0x89, 0xdf, 0x6a, 0xf5, 0x5e, 0xfc, 0x5b, 0x4d, 0x7e, 0xf4, 0xf5, 0x3f, 0xfe, 0xf3, 0xbb, 0xcc, 0x43, 0xb9, 0xb6, 0xf8, 0x97, 0x2f, 0x78, 0x22, 0x4a, 0x05, 0x3f, 0x91, 0xce, 0x2f, 0xff, 0x2e, 0xc1, 0xb9, 0x41, 0xa6, 0x6b, 0xda, 0x71, 0xf9, 0xf0, 0x36, 0xe7, 0xfa, 0x51, 0x5b, 0xf4, 0xa5, 0x9f, 0xff, 0x54, 0x00, 0x59, 0x24, 0x7a, 0x52, 0xd4, 0x89, 0x6f, 0x35, 0x2c, 0xec, 0xb2, 0xa6, 0x69, 0x70, 0x96, 0xee, 0xd9, 0xc1, 0x5d, 0xff, 0x23, 0x9f, 0x2e, 0x71, 0xfe, 0x90, 0xf9, 0xf0, 0x73, 0x8e, 0xdc, 0x62, 0x26, 0x2e, 0xd9, 0x51, 0x7f, 0xc9, 0x45, 0x47, 0x79, 0xa6, 0xec, 0xa3, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0x5d, 0x2f, 0xdf, 0xfc, 0xfb, 0x14, 0x00, 0x00, }
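
// The sketch below is illustrative only and is not part of the generated code:
// it shows one way to build an AnnotateVideoRequest for a Cloud Storage video
// and start annotation through the raw generated stub. The function name, the
// *grpc.ClientConn argument, and the example URI are assumptions; applications
// typically dial the production endpoint with proper credentials or use a
// hand-written client wrapper instead of this stub directly.
func exampleStartAnnotation(ctx context.Context, conn *grpc.ClientConn) (*google_longrunning.Operation, error) {
	client := NewVideoIntelligenceServiceClient(conn)
	req := &AnnotateVideoRequest{
		// `object-id` may contain wildcards ('*' and '?') to address multiple videos.
		InputUri: "gs://bucket-id/object-id",
		Features: []Feature{Feature_LABEL_DETECTION, Feature_SHOT_CHANGE_DETECTION},
		VideoContext: &VideoContext{
			LabelDetectionConfig: &LabelDetectionConfig{
				LabelDetectionMode: LabelDetectionMode_SHOT_AND_FRAME_MODE,
			},
		},
	}
	// The RPC returns a long-running operation; progress is exposed via
	// Operation.metadata (AnnotateVideoProgress) and results via
	// Operation.response (AnnotateVideoResponse).
	return client.AnnotateVideo(ctx, req)
}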
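
// Another illustrative sketch (not generated code): restricting annotation to
// the first thirty seconds of the input by supplying an explicit VideoSegment
// in the VideoContext. The helper name and the 30-second window are arbitrary
// examples; segments may overlap and need not span the whole video.
func exampleLeadingSegmentContext() *VideoContext {
	return &VideoContext{
		Segments: []*VideoSegment{
			{
				StartTimeOffset: &google_protobuf3.Duration{Seconds: 0},
				EndTimeOffset:   &google_protobuf3.Duration{Seconds: 30},
			},
		},
	}
}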
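
// A final illustrative sketch (not generated code): once the returned operation
// has completed successfully, its packed response can be unmarshaled into an
// AnnotateVideoResponse and the segment-level labels collected. The function
// name is an assumption, and handling of an unfinished or failed operation is
// elided for brevity.
func exampleCollectSegmentLabels(op *google_longrunning.Operation) ([]*LabelAnnotation, error) {
	resp := &AnnotateVideoResponse{}
	// Operation.response is a google.protobuf.Any holding an AnnotateVideoResponse.
	if err := proto.Unmarshal(op.GetResponse().GetValue(), resp); err != nil {
		return nil, err
	}
	var labels []*LabelAnnotation
	for _, result := range resp.GetAnnotationResults() {
		labels = append(labels, result.GetSegmentLabelAnnotations()...)
	}
	return labels, nil
}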