1// Copyright 2020 Google LLC.
2// Use of this source code is governed by a BSD-style
3// license that can be found in the LICENSE file.
4
5// Code generated file. DO NOT EDIT.
6
7// Package speech provides access to the Cloud Speech-to-Text API.
8//
9// This package is DEPRECATED. Use package cloud.google.com/go/speech/apiv1 instead.
10//
11// For product documentation, see: https://cloud.google.com/speech-to-text/docs/quickstart-protocol
12//
13// Creating a client
14//
15// Usage example:
16//
17//   import "google.golang.org/api/speech/v1p1beta1"
18//   ...
19//   ctx := context.Background()
20//   speechService, err := speech.NewService(ctx)
21//
22// In this example, Google Application Default Credentials are used for authentication.
23//
24// For information on how to create and obtain Application Default Credentials, see https://developers.google.com/identity/protocols/application-default-credentials.
25//
26// Other authentication options
27//
28// To use an API key for authentication (note: some APIs do not support API keys), use option.WithAPIKey:
29//
30//   speechService, err := speech.NewService(ctx, option.WithAPIKey("AIza..."))
31//
32// To use an OAuth token (e.g., a user token obtained via a three-legged OAuth flow), use option.WithTokenSource:
33//
34//   config := &oauth2.Config{...}
35//   // ...
36//   token, err := config.Exchange(ctx, ...)
37//   speechService, err := speech.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token)))
38//
39// See https://godoc.org/google.golang.org/api/option/ for details on options.
40package speech // import "google.golang.org/api/speech/v1p1beta1"
41
42import (
43	"bytes"
44	"context"
45	"encoding/json"
46	"errors"
47	"fmt"
48	"io"
49	"net/http"
50	"net/url"
51	"strconv"
52	"strings"
53
54	googleapi "google.golang.org/api/googleapi"
55	gensupport "google.golang.org/api/internal/gensupport"
56	option "google.golang.org/api/option"
57	internaloption "google.golang.org/api/option/internaloption"
58	htransport "google.golang.org/api/transport/http"
59)
60
61// Always reference these packages, just in case the auto-generated code
62// below doesn't.
63var _ = bytes.NewBuffer
64var _ = strconv.Itoa
65var _ = fmt.Sprintf
66var _ = json.NewDecoder
67var _ = io.Copy
68var _ = url.Parse
69var _ = gensupport.MarshalJSON
70var _ = googleapi.Version
71var _ = errors.New
72var _ = strings.Replace
73var _ = context.Canceled
74var _ = internaloption.WithDefaultEndpoint
75
// Identity of the API served by this generated client.
const (
	apiId      = "speech:v1p1beta1"
	apiName    = "speech"
	apiVersion = "v1p1beta1"
	basePath   = "https://speech.googleapis.com/"
)

// OAuth2 scopes used by this API.
const (
	// View and manage your data across Google Cloud Platform services
	CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
)
86
87// NewService creates a new Service.
88func NewService(ctx context.Context, opts ...option.ClientOption) (*Service, error) {
89	scopesOption := option.WithScopes(
90		"https://www.googleapis.com/auth/cloud-platform",
91	)
92	// NOTE: prepend, so we don't override user-specified scopes.
93	opts = append([]option.ClientOption{scopesOption}, opts...)
94	opts = append(opts, internaloption.WithDefaultEndpoint(basePath))
95	client, endpoint, err := htransport.NewClient(ctx, opts...)
96	if err != nil {
97		return nil, err
98	}
99	s, err := New(client)
100	if err != nil {
101		return nil, err
102	}
103	if endpoint != "" {
104		s.BasePath = endpoint
105	}
106	return s, nil
107}
108
109// New creates a new Service. It uses the provided http.Client for requests.
110//
111// Deprecated: please use NewService instead.
112// To provide a custom HTTP client, use option.WithHTTPClient.
113// If you are using google.golang.org/api/googleapis/transport.APIKey, use option.WithAPIKey with NewService instead.
114func New(client *http.Client) (*Service, error) {
115	if client == nil {
116		return nil, errors.New("client is nil")
117	}
118	s := &Service{client: client, BasePath: basePath}
119	s.Operations = NewOperationsService(s)
120	s.Projects = NewProjectsService(s)
121	s.Speech = NewSpeechService(s)
122	return s, nil
123}
124
// Service is the top-level client for the Cloud Speech-to-Text API,
// created by NewService (or the deprecated New). It holds the shared
// HTTP client and one sub-service per API collection.
type Service struct {
	client    *http.Client
	BasePath  string // API endpoint base URL
	UserAgent string // optional additional User-Agent fragment

	// Operations is the sub-service for the "operations" collection.
	Operations *OperationsService

	// Projects is the sub-service for the "projects" collection.
	Projects *ProjectsService

	// Speech is the sub-service for the "speech" collection.
	Speech *SpeechService
}
136
137func (s *Service) userAgent() string {
138	if s.UserAgent == "" {
139		return googleapi.UserAgent
140	}
141	return googleapi.UserAgent + " " + s.UserAgent
142}
143
144func NewOperationsService(s *Service) *OperationsService {
145	rs := &OperationsService{s: s}
146	return rs
147}
148
// OperationsService provides methods for the "operations" collection.
type OperationsService struct {
	s *Service // parent client; carries the HTTP client and base path
}
152
153func NewProjectsService(s *Service) *ProjectsService {
154	rs := &ProjectsService{s: s}
155	rs.Locations = NewProjectsLocationsService(s)
156	return rs
157}
158
// ProjectsService provides methods for the "projects" collection.
type ProjectsService struct {
	s *Service // parent client; carries the HTTP client and base path

	// Locations is the sub-service for the "projects.locations" collection.
	Locations *ProjectsLocationsService
}
164
165func NewProjectsLocationsService(s *Service) *ProjectsLocationsService {
166	rs := &ProjectsLocationsService{s: s}
167	rs.Operations = NewProjectsLocationsOperationsService(s)
168	return rs
169}
170
// ProjectsLocationsService provides methods for the
// "projects.locations" collection.
type ProjectsLocationsService struct {
	s *Service // parent client; carries the HTTP client and base path

	// Operations is the sub-service for the
	// "projects.locations.operations" collection.
	Operations *ProjectsLocationsOperationsService
}
176
177func NewProjectsLocationsOperationsService(s *Service) *ProjectsLocationsOperationsService {
178	rs := &ProjectsLocationsOperationsService{s: s}
179	return rs
180}
181
// ProjectsLocationsOperationsService provides methods for the
// "projects.locations.operations" collection.
type ProjectsLocationsOperationsService struct {
	s *Service // parent client; carries the HTTP client and base path
}
185
186func NewSpeechService(s *Service) *SpeechService {
187	rs := &SpeechService{s: s}
188	return rs
189}
190
// SpeechService provides methods for the "speech" collection.
type SpeechService struct {
	s *Service // parent client; carries the HTTP client and base path
}
194
// ListOperationsResponse: The response message for
// Operations.ListOperations.
type ListOperationsResponse struct {
	// NextPageToken: The standard List next-page token.
	NextPageToken string `json:"nextPageToken,omitempty"`

	// Operations: A list of operations that matches the specified filter
	// in the request.
	Operations []*Operation `json:"operations,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "NextPageToken") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "NextPageToken") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
225
226func (s *ListOperationsResponse) MarshalJSON() ([]byte, error) {
227	type NoMethod ListOperationsResponse
228	raw := NoMethod(*s)
229	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
230}
231
// LongRunningRecognizeMetadata: Describes the progress of a long-running
// `LongRunningRecognize` call. It is included in the `metadata` field of
// the `Operation` returned by the `GetOperation` call of the
// `google::longrunning::Operations` service.
type LongRunningRecognizeMetadata struct {
	// LastUpdateTime: Time of the most recent processing update.
	LastUpdateTime string `json:"lastUpdateTime,omitempty"`

	// ProgressPercent: Approximate percentage of audio processed thus far.
	// Guaranteed to be 100 when the audio is fully processed and the
	// results are available.
	ProgressPercent int64 `json:"progressPercent,omitempty"`

	// StartTime: Time when the request was received.
	StartTime string `json:"startTime,omitempty"`

	// ForceSendFields is a list of field names (e.g. "LastUpdateTime") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "LastUpdateTime") to
	// include in API requests with the JSON null value. By default, fields
	// with empty values are omitted from API requests. However, any field
	// with an empty value appearing in NullFields will be sent to the
	// server as null. It is an error if a field in this list has a
	// non-empty value. This may be used to include null fields in Patch
	// requests.
	NullFields []string `json:"-"`
}
266
267func (s *LongRunningRecognizeMetadata) MarshalJSON() ([]byte, error) {
268	type NoMethod LongRunningRecognizeMetadata
269	raw := NoMethod(*s)
270	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
271}
272
// LongRunningRecognizeRequest: The top-level message sent by the client
// for the `LongRunningRecognize` method.
type LongRunningRecognizeRequest struct {
	// Audio: Required. The audio data to be recognized.
	Audio *RecognitionAudio `json:"audio,omitempty"`

	// Config: Required. Provides information to the recognizer that
	// specifies how to process the request.
	Config *RecognitionConfig `json:"config,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Audio") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Audio") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
301
302func (s *LongRunningRecognizeRequest) MarshalJSON() ([]byte, error) {
303	type NoMethod LongRunningRecognizeRequest
304	raw := NoMethod(*s)
305	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
306}
307
// LongRunningRecognizeResponse: The only message returned to the client
// by the `LongRunningRecognize` method. It contains the result as zero
// or more sequential `SpeechRecognitionResult` messages. It is included
// in the `result.response` field of the `Operation` returned by the
// `GetOperation` call of the `google::longrunning::Operations` service.
type LongRunningRecognizeResponse struct {
	// Results: Sequential list of transcription results corresponding to
	// sequential portions of audio.
	Results []*SpeechRecognitionResult `json:"results,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Results") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Results") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
339
340func (s *LongRunningRecognizeResponse) MarshalJSON() ([]byte, error) {
341	type NoMethod LongRunningRecognizeResponse
342	raw := NoMethod(*s)
343	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
344}
345
// Operation: This resource represents a long-running operation that is
// the result of a network API call.
type Operation struct {
	// Done: If the value is `false`, it means the operation is still in
	// progress. If `true`, the operation is completed, and either `error`
	// or `response` is available.
	Done bool `json:"done,omitempty"`

	// Error: The error result of the operation in case of failure or
	// cancellation.
	Error *Status `json:"error,omitempty"`

	// Metadata: Service-specific metadata associated with the operation.
	// It typically contains progress information and common metadata such
	// as create time. Some services might not provide such metadata.  Any
	// method that returns a long-running operation should document the
	// metadata type, if any.
	Metadata googleapi.RawMessage `json:"metadata,omitempty"`

	// Name: The server-assigned name, which is only unique within the same
	// service that originally returns it. If you use the default HTTP
	// mapping, the `name` should be a resource name ending with
	// `operations/{unique_id}`.
	Name string `json:"name,omitempty"`

	// Response: The normal response of the operation in case of success.
	// If the original method returns no data on success, such as `Delete`,
	// the response is `google.protobuf.Empty`.  If the original method is
	// standard `Get`/`Create`/`Update`, the response should be the
	// resource.  For other methods, the response should have the type
	// `XxxResponse`, where `Xxx` is the original method name.  For
	// example, if the original method name is `TakeSnapshot()`, the
	// inferred response type is `TakeSnapshotResponse`.
	Response googleapi.RawMessage `json:"response,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Done") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Done") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
415
416func (s *Operation) MarshalJSON() ([]byte, error) {
417	type NoMethod Operation
418	raw := NoMethod(*s)
419	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
420}
421
// RecognitionAudio: Contains audio data in the encoding specified in
// the `RecognitionConfig`. Either `content` or `uri` must be supplied.
// Supplying both or neither returns google.rpc.Code.INVALID_ARGUMENT.
// See [content
// limits](https://cloud.google.com/speech-to-text/quotas#content).
type RecognitionAudio struct {
	// Content: The audio data bytes encoded as specified in
	// `RecognitionConfig`. Note: as with all bytes fields, proto buffers
	// use a pure binary representation, whereas JSON representations use
	// base64.
	Content string `json:"content,omitempty"`

	// Uri: URI that points to a file that contains audio data bytes as
	// specified in `RecognitionConfig`. The file must not be compressed
	// (for example, gzip). Currently, only Google Cloud Storage URIs are
	// supported, which must be specified in the following format:
	// `gs://bucket_name/object_name` (other URI formats return
	// google.rpc.Code.INVALID_ARGUMENT). For more information, see
	// [Request URIs](https://cloud.google.com/storage/docs/reference-uris).
	Uri string `json:"uri,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Content") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Content") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
466
467func (s *RecognitionAudio) MarshalJSON() ([]byte, error) {
468	type NoMethod RecognitionAudio
469	raw := NoMethod(*s)
470	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
471}
472
// RecognitionConfig: Provides information to the recognizer that
// specifies how to process the request.
type RecognitionConfig struct {
	// AlternativeLanguageCodes: A list of up to 3 additional
	// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language
	// tags, listing possible alternative languages of the supplied audio.
	// See [Language
	// Support](https://cloud.google.com/speech-to-text/docs/languages) for
	// a list of the currently supported language codes. If alternative
	// languages are listed, recognition result will contain recognition in
	// the most likely language detected including the main language_code.
	// The recognition result will include the language tag of the language
	// detected in the audio. Note: This feature is only supported for
	// Voice Command and Voice Search use cases and performance may vary
	// for other use cases (e.g., phone call transcription).
	AlternativeLanguageCodes []string `json:"alternativeLanguageCodes,omitempty"`

	// AudioChannelCount: The number of channels in the input audio data.
	// ONLY set this for MULTI-CHANNEL recognition.
	// Valid values for LINEAR16 and FLAC are `1`-`8`.
	// Valid values for OGG_OPUS are '1'-'254'.
	// Valid value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is
	// only `1`. If `0` or omitted, defaults to one channel (mono).
	// Note: We only recognize the first channel by default. To perform
	// independent recognition on each channel set
	// `enable_separate_recognition_per_channel` to 'true'.
	AudioChannelCount int64 `json:"audioChannelCount,omitempty"`

	// DiarizationConfig: Config to enable speaker diarization and set
	// additional parameters to make diarization better suited for your
	// application. Note: When this is enabled, we send all the words from
	// the beginning of the audio for the top alternative in every
	// consecutive STREAMING responses. This is done in order to improve
	// our speaker tags as our models learn to identify the speakers in
	// the conversation over time. For non-streaming requests, the
	// diarization results will be provided only in the top alternative of
	// the FINAL SpeechRecognitionResult.
	DiarizationConfig *SpeakerDiarizationConfig `json:"diarizationConfig,omitempty"`

	// DiarizationSpeakerCount: If set, specifies the estimated number of
	// speakers in the conversation. Defaults to '2'. Ignored unless
	// enable_speaker_diarization is set to true.
	// Note: Use diarization_config instead.
	DiarizationSpeakerCount int64 `json:"diarizationSpeakerCount,omitempty"`

	// EnableAutomaticPunctuation: If 'true', adds punctuation to
	// recognition result hypotheses. This feature is only available in
	// select languages. Setting this for requests in other languages has
	// no effect at all. The default 'false' value does not add punctuation
	// to result hypotheses. Note: This is currently offered as an
	// experimental service, complimentary to all users. In the future this
	// may be exclusively available as a premium feature.
	EnableAutomaticPunctuation bool `json:"enableAutomaticPunctuation,omitempty"`

	// EnableSeparateRecognitionPerChannel: This needs to be set to `true`
	// explicitly and `audio_channel_count` > 1 to get each channel
	// recognized separately. The recognition result will contain a
	// `channel_tag` field to state which channel that result belongs to.
	// If this is not true, we will only recognize the first channel. The
	// request is billed cumulatively for all channels recognized:
	// `audio_channel_count` multiplied by the length of the audio.
	EnableSeparateRecognitionPerChannel bool `json:"enableSeparateRecognitionPerChannel,omitempty"`

	// EnableSpeakerDiarization: If 'true', enables speaker detection for
	// each recognized word in the top alternative of the recognition
	// result using a speaker_tag provided in the WordInfo.
	// Note: Use diarization_config instead.
	EnableSpeakerDiarization bool `json:"enableSpeakerDiarization,omitempty"`

	// EnableWordConfidence: If `true`, the top result includes a list of
	// words and the confidence for those words. If `false`, no word-level
	// confidence information is returned. The default is `false`.
	EnableWordConfidence bool `json:"enableWordConfidence,omitempty"`

	// EnableWordTimeOffsets: If `true`, the top result includes a list of
	// words and the start and end time offsets (timestamps) for those
	// words. If `false`, no word-level time offset information is
	// returned. The default is `false`.
	EnableWordTimeOffsets bool `json:"enableWordTimeOffsets,omitempty"`

	// Encoding: Encoding of audio data sent in all `RecognitionAudio`
	// messages. This field is optional for `FLAC` and `WAV` audio files
	// and required for all other audio formats. For details, see
	// AudioEncoding.
	//
	// Possible values:
	//   "ENCODING_UNSPECIFIED" - Not specified.
	//   "LINEAR16" - Uncompressed 16-bit signed little-endian samples
	// (Linear PCM).
	//   "FLAC" - `FLAC` (Free Lossless Audio Codec) is the recommended
	// encoding because it is lossless--therefore recognition is not
	// compromised--and requires only about half the bandwidth of
	// `LINEAR16`. `FLAC` stream encoding supports 16-bit and 24-bit
	// samples, however, not all fields in `STREAMINFO` are supported.
	//   "MULAW" - 8-bit samples that compand 14-bit audio samples using
	// G.711 PCMU/mu-law.
	//   "AMR" - Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz`
	// must be 8000.
	//   "AMR_WB" - Adaptive Multi-Rate Wideband codec. `sample_rate_hertz`
	// must be 16000.
	//   "OGG_OPUS" - Opus encoded audio frames in Ogg container
	// ([OggOpus](https://wiki.xiph.org/OggOpus)). `sample_rate_hertz`
	// must be one of 8000, 12000, 16000, 24000, or 48000.
	//   "SPEEX_WITH_HEADER_BYTE" - Although the use of lossy encodings is
	// not recommended, if a very low bitrate encoding is required,
	// `OGG_OPUS` is highly preferred over Speex encoding. The
	// [Speex](https://speex.org/) encoding supported by Cloud Speech API
	// has a header byte in each block, as in MIME type
	// `audio/x-speex-with-header-byte`. It is a variant of the RTP Speex
	// encoding defined in [RFC 5574](https://tools.ietf.org/html/rfc5574).
	// The stream is a sequence of blocks, one block per RTP packet. Each
	// block starts with a byte containing the length of the block, in
	// bytes, followed by one or more frames of Speex data, padded to an
	// integral number of bytes (octets) as specified in RFC 5574. In
	// other words, each RTP header is replaced with a single byte
	// containing the block length. Only Speex wideband is supported.
	// `sample_rate_hertz` must be 16000.
	//   "MP3" - MP3 audio. Support all standard MP3 bitrates (which range
	// from 32-320 kbps). When using this encoding, `sample_rate_hertz`
	// has to match the sample rate of the file being used.
	Encoding string `json:"encoding,omitempty"`

	// LanguageCode: Required. The language of the supplied audio as a
	// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
	// Example: "en-US". See [Language
	// Support](https://cloud.google.com/speech-to-text/docs/languages) for
	// a list of the currently supported language codes.
	LanguageCode string `json:"languageCode,omitempty"`

	// MaxAlternatives: Maximum number of recognition hypotheses to be
	// returned. Specifically, the maximum number of
	// `SpeechRecognitionAlternative` messages within each
	// `SpeechRecognitionResult`. The server may return fewer than
	// `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1`
	// will return a maximum of one. If omitted, will return a maximum of
	// one.
	MaxAlternatives int64 `json:"maxAlternatives,omitempty"`

	// Metadata: Metadata regarding this request.
	Metadata *RecognitionMetadata `json:"metadata,omitempty"`

	// Model: Which model to select for the given request. Select the model
	// best suited to your domain to get best results. If a model is not
	// explicitly specified, then we auto-select a model based on the
	// parameters in the RecognitionConfig.
	// <table>
	//   <tr>
	//     <td><b>Model</b></td>
	//     <td><b>Description</b></td>
	//   </tr>
	//   <tr>
	//     <td><code>command_and_search</code></td>
	//     <td>Best for short queries such as voice commands or voice
	// search.</td>
	//   </tr>
	//   <tr>
	//     <td><code>phone_call</code></td>
	//     <td>Best for audio that originated from a phone call (typically
	//     recorded at an 8khz sampling rate).</td>
	//   </tr>
	//   <tr>
	//     <td><code>video</code></td>
	//     <td>Best for audio that originated from from video or includes
	// multiple
	//         speakers. Ideally the audio is recorded at a 16khz or
	// greater
	//         sampling rate. This is a premium model that costs more than
	// the
	//         standard rate.</td>
	//   </tr>
	//   <tr>
	//     <td><code>default</code></td>
	//     <td>Best for audio that is not one of the specific audio models.
	//         For example, long-form audio. Ideally the audio is
	// high-fidelity,
	//         recorded at a 16khz or greater sampling rate.</td>
	//   </tr>
	// </table>
	Model string `json:"model,omitempty"`

	// ProfanityFilter: If set to `true`, the server will attempt to filter
	// out profanities, replacing all but the initial character in each
	// filtered word with asterisks, e.g. "f***". If set to `false` or
	// omitted, profanities won't be filtered out.
	ProfanityFilter bool `json:"profanityFilter,omitempty"`

	// SampleRateHertz: Sample rate in Hertz of the audio data sent in all
	// `RecognitionAudio` messages. Valid values are: 8000-48000. 16000 is
	// optimal. For best results, set the sampling rate of the audio source
	// to 16000 Hz. If that's not possible, use the native sample rate of
	// the audio source (instead of re-sampling). This field is optional
	// for FLAC and WAV audio files, but is required for all other audio
	// formats. For details, see AudioEncoding.
	SampleRateHertz int64 `json:"sampleRateHertz,omitempty"`

	// SpeechContexts: Array of SpeechContext. A means to provide context
	// to assist the speech recognition. For more information, see [speech
	// adaptation](https://cloud.google.com/speech-to-text/docs/context-strength).
	SpeechContexts []*SpeechContext `json:"speechContexts,omitempty"`

	// UseEnhanced: Set to true to use an enhanced model for speech
	// recognition. If `use_enhanced` is set to true and the `model` field
	// is not set, then an appropriate enhanced model is chosen if an
	// enhanced model exists for the audio.
	//
	// If `use_enhanced` is true and an enhanced version of the specified
	// model does not exist, then the speech is recognized using the
	// standard version of the specified model.
	UseEnhanced bool `json:"useEnhanced,omitempty"`

	// ForceSendFields is a list of field names (e.g.
	// "AlternativeLanguageCodes") to unconditionally include in API
	// requests. By default, fields with empty values are omitted from API
	// requests. However, any non-pointer, non-interface field appearing in
	// ForceSendFields will be sent to the server regardless of whether the
	// field is empty or not. This may be used to include empty fields in
	// Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "AlternativeLanguageCodes")
	// to include in API requests with the JSON null value. By default,
	// fields with empty values are omitted from API requests. However, any
	// field with an empty value appearing in NullFields will be sent to the
	// server as null. It is an error if a field in this list has a
	// non-empty value. This may be used to include null fields in Patch
	// requests.
	NullFields []string `json:"-"`
}
779
780func (s *RecognitionConfig) MarshalJSON() ([]byte, error) {
781	type NoMethod RecognitionConfig
782	raw := NoMethod(*s)
783	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
784}
785
// RecognitionMetadata: Description of audio data to be recognized.
type RecognitionMetadata struct {
	// AudioTopic: Description of the content. Eg. "Recordings of federal
	// supreme court hearings from 2012".
	AudioTopic string `json:"audioTopic,omitempty"`

	// IndustryNaicsCodeOfAudio: The industry vertical to which this speech
	// recognition request most closely applies. This is most indicative of
	// the topics contained in the audio. Use the 6-digit NAICS code to
	// identify the industry vertical - see https://www.naics.com/search/.
	IndustryNaicsCodeOfAudio int64 `json:"industryNaicsCodeOfAudio,omitempty"`

	// InteractionType: The use case most closely describing the audio
	// content to be recognized.
	//
	// Possible values:
	//   "INTERACTION_TYPE_UNSPECIFIED" - Use case is either unknown or is
	// something other than one of the other values below.
	//   "DISCUSSION" - Multiple people in a conversation or discussion.
	// For example in a meeting with two or more people actively
	// participating. Typically all the primary people speaking would be in
	// the same room (if not, see PHONE_CALL)
	//   "PRESENTATION" - One or more persons lecturing or presenting to
	// others, mostly uninterrupted.
	//   "PHONE_CALL" - A phone-call or video-conference in which two or
	// more people, who are not in the same room, are actively
	// participating.
	//   "VOICEMAIL" - A recorded message intended for another person to
	// listen to.
	//   "PROFESSIONALLY_PRODUCED" - Professionally produced audio (eg. TV
	// Show, Podcast).
	//   "VOICE_SEARCH" - Transcribe spoken questions and queries into text.
	//   "VOICE_COMMAND" - Transcribe voice commands, such as for
	// controlling a device.
	//   "DICTATION" - Transcribe speech to text to create a written
	// document, such as a text-message, email or report.
	InteractionType string `json:"interactionType,omitempty"`

	// MicrophoneDistance: The audio type that most closely describes the
	// audio being recognized.
	//
	// Possible values:
	//   "MICROPHONE_DISTANCE_UNSPECIFIED" - Audio type is not known.
	//   "NEARFIELD" - The audio was captured from a closely placed
	// microphone. Eg. phone, dictaphone, or handheld microphone. Generally
	// if the speaker is within 1 meter of the microphone.
	//   "MIDFIELD" - The speaker is within 3 meters of the microphone.
	//   "FARFIELD" - The speaker is more than 3 meters away from the
	// microphone.
	MicrophoneDistance string `json:"microphoneDistance,omitempty"`

	// ObfuscatedId: Obfuscated (privacy-protected) ID of the user, to
	// identify number of unique users using the service.
	// The ",string" tag option makes this int64 travel as a JSON string,
	// avoiding precision loss in JavaScript-style number handling.
	ObfuscatedId int64 `json:"obfuscatedId,omitempty,string"`

	// OriginalMediaType: The original media the speech was recorded on.
	//
	// Possible values:
	//   "ORIGINAL_MEDIA_TYPE_UNSPECIFIED" - Unknown original media type.
	//   "AUDIO" - The speech data is an audio recording.
	//   "VIDEO" - The speech data originally recorded on a video.
	OriginalMediaType string `json:"originalMediaType,omitempty"`

	// OriginalMimeType: Mime type of the original audio file. For example
	// `audio/m4a`, `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
	// A list of possible audio mime types is maintained at
	// http://www.iana.org/assignments/media-types/media-types.xhtml#audio
	OriginalMimeType string `json:"originalMimeType,omitempty"`

	// RecordingDeviceName: The device used to make the recording. Examples
	// 'Nexus 5X' or 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
	// 'Cardioid Microphone'.
	RecordingDeviceName string `json:"recordingDeviceName,omitempty"`

	// RecordingDeviceType: The type of device the speech was recorded with.
	//
	// Possible values:
	//   "RECORDING_DEVICE_TYPE_UNSPECIFIED" - The recording device is
	// unknown.
	//   "SMARTPHONE" - Speech was recorded on a smartphone.
	//   "PC" - Speech was recorded using a personal computer or tablet.
	//   "PHONE_LINE" - Speech was recorded over a phone line.
	//   "VEHICLE" - Speech was recorded in a vehicle.
	//   "OTHER_OUTDOOR_DEVICE" - Speech was recorded outdoors.
	//   "OTHER_INDOOR_DEVICE" - Speech was recorded indoors.
	RecordingDeviceType string `json:"recordingDeviceType,omitempty"`

	// ForceSendFields is a list of field names (e.g. "AudioTopic") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "AudioTopic") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
903
904func (s *RecognitionMetadata) MarshalJSON() ([]byte, error) {
905	type NoMethod RecognitionMetadata
906	raw := NoMethod(*s)
907	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
908}
909
// RecognizeRequest: The top-level message sent by the client for the
// `Recognize` method.
type RecognizeRequest struct {
	// Audio: Required. The audio data to be recognized.
	Audio *RecognitionAudio `json:"audio,omitempty"`

	// Config: Required. Provides information to the recognizer that
	// specifies how to process the request.
	Config *RecognitionConfig `json:"config,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Audio") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Audio") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
937
938func (s *RecognizeRequest) MarshalJSON() ([]byte, error) {
939	type NoMethod RecognizeRequest
940	raw := NoMethod(*s)
941	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
942}
943
// RecognizeResponse: The only message returned to the client by the
// `Recognize` method. It contains the result as zero or more sequential
// `SpeechRecognitionResult` messages.
type RecognizeResponse struct {
	// Results: Sequential list of transcription results corresponding to
	// sequential portions of audio.
	Results []*SpeechRecognitionResult `json:"results,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Results") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Results") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
975
976func (s *RecognizeResponse) MarshalJSON() ([]byte, error) {
977	type NoMethod RecognizeResponse
978	raw := NoMethod(*s)
979	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
980}
981
// SpeakerDiarizationConfig: Config to enable speaker diarization.
type SpeakerDiarizationConfig struct {
	// EnableSpeakerDiarization: If 'true', enables speaker detection for
	// each recognized word in the top alternative of the recognition
	// result using a speaker_tag provided in the WordInfo.
	EnableSpeakerDiarization bool `json:"enableSpeakerDiarization,omitempty"`

	// MaxSpeakerCount: Maximum number of speakers in the conversation.
	// This range gives you more flexibility by allowing the system to
	// automatically determine the correct number of speakers. If not set,
	// the default value is 6.
	MaxSpeakerCount int64 `json:"maxSpeakerCount,omitempty"`

	// MinSpeakerCount: Minimum number of speakers in the conversation.
	// This range gives you more flexibility by allowing the system to
	// automatically determine the correct number of speakers. If not set,
	// the default value is 2.
	MinSpeakerCount int64 `json:"minSpeakerCount,omitempty"`

	// SpeakerTag: Output only. Unused.
	SpeakerTag int64 `json:"speakerTag,omitempty"`

	// ForceSendFields is a list of field names (e.g.
	// "EnableSpeakerDiarization") to unconditionally include in API
	// requests. By default, fields with empty values are omitted from API
	// requests. However, any non-pointer, non-interface field appearing in
	// ForceSendFields will be sent to the server regardless of whether the
	// field is empty or not. This may be used to include empty fields in
	// Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "EnableSpeakerDiarization")
	// to include in API requests with the JSON null value. By default,
	// fields with empty values are omitted from API requests. However, any
	// field with an empty value appearing in NullFields will be sent to the
	// server as null. It is an error if a field in this list has a
	// non-empty value. This may be used to include null fields in Patch
	// requests.
	NullFields []string `json:"-"`
}
1026
1027func (s *SpeakerDiarizationConfig) MarshalJSON() ([]byte, error) {
1028	type NoMethod SpeakerDiarizationConfig
1029	raw := NoMethod(*s)
1030	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1031}
1032
// SpeechContext: Provides "hints" to the speech recognizer to favor
// specific words and phrases in the results.
type SpeechContext struct {
	// Boost: Hint Boost. Positive value will increase the probability that
	// a specific phrase will be recognized over other similar sounding
	// phrases. The higher the boost, the higher the chance of false
	// positive recognition as well. Negative boost values would correspond
	// to anti-biasing. Anti-biasing is not enabled, so negative boost will
	// simply be ignored. Though `boost` can accept a wide range of
	// positive values, most use cases are best served with values between
	// 0 and 20. We recommend using a binary search approach to finding the
	// optimal value for your use case.
	Boost float64 `json:"boost,omitempty"`

	// Phrases: A list of strings containing words and phrases "hints" so
	// that the speech recognition is more likely to recognize them. This
	// can be used to improve the accuracy for specific words and phrases,
	// for example, if specific commands are typically spoken by the user.
	// This can also be used to add additional words to the vocabulary of
	// the recognizer. See [usage
	// limits](https://cloud.google.com/speech-to-text/quotas#content).
	//
	// List items can also be set to classes for groups of words that
	// represent common concepts that occur in natural language. For
	// example, rather than providing phrase hints for every month of the
	// year, using the $MONTH class improves the likelihood of correctly
	// transcribing audio that includes months.
	Phrases []string `json:"phrases,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Boost") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Boost") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1095
1096func (s *SpeechContext) MarshalJSON() ([]byte, error) {
1097	type NoMethod SpeechContext
1098	raw := NoMethod(*s)
1099	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1100}
1101
1102func (s *SpeechContext) UnmarshalJSON(data []byte) error {
1103	type NoMethod SpeechContext
1104	var s1 struct {
1105		Boost gensupport.JSONFloat64 `json:"boost"`
1106		*NoMethod
1107	}
1108	s1.NoMethod = (*NoMethod)(s)
1109	if err := json.Unmarshal(data, &s1); err != nil {
1110		return err
1111	}
1112	s.Boost = float64(s1.Boost)
1113	return nil
1114}
1115
// SpeechRecognitionAlternative: Alternative hypotheses (a.k.a. n-best
// list).
type SpeechRecognitionAlternative struct {
	// Confidence: The confidence estimate between 0.0 and 1.0. A higher
	// number indicates an estimated greater likelihood that the recognized
	// words are correct. This field is set only for the top alternative of
	// a non-streaming result or, of a streaming result where
	// `is_final=true`. This field is not guaranteed to be accurate and
	// users should not rely on it to be always provided. The default of
	// 0.0 is a sentinel value indicating `confidence` was not set.
	Confidence float64 `json:"confidence,omitempty"`

	// Transcript: Transcript text representing the words that the user
	// spoke.
	Transcript string `json:"transcript,omitempty"`

	// Words: A list of word-specific information for each recognized word.
	// Note: When `enable_speaker_diarization` is true, you will see all
	// the words from the beginning of the audio.
	Words []*WordInfo `json:"words,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Confidence") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Confidence") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1160
1161func (s *SpeechRecognitionAlternative) MarshalJSON() ([]byte, error) {
1162	type NoMethod SpeechRecognitionAlternative
1163	raw := NoMethod(*s)
1164	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1165}
1166
1167func (s *SpeechRecognitionAlternative) UnmarshalJSON(data []byte) error {
1168	type NoMethod SpeechRecognitionAlternative
1169	var s1 struct {
1170		Confidence gensupport.JSONFloat64 `json:"confidence"`
1171		*NoMethod
1172	}
1173	s1.NoMethod = (*NoMethod)(s)
1174	if err := json.Unmarshal(data, &s1); err != nil {
1175		return err
1176	}
1177	s.Confidence = float64(s1.Confidence)
1178	return nil
1179}
1180
// SpeechRecognitionResult: A speech recognition result corresponding to
// a portion of the audio.
type SpeechRecognitionResult struct {
	// Alternatives: May contain one or more recognition hypotheses (up to
	// the maximum specified in `max_alternatives`). These alternatives are
	// ordered in terms of accuracy, with the top (first) alternative being
	// the most probable, as ranked by the recognizer.
	Alternatives []*SpeechRecognitionAlternative `json:"alternatives,omitempty"`

	// ChannelTag: For multi-channel audio, this is the channel number
	// corresponding to the recognized result for the audio from that
	// channel. For audio_channel_count = N, its output values can range
	// from '1' to 'N'.
	ChannelTag int64 `json:"channelTag,omitempty"`

	// LanguageCode: Output only. The
	// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag
	// of the language in this result. This language code was detected to
	// have the most likelihood of being spoken in the audio.
	LanguageCode string `json:"languageCode,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Alternatives") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Alternatives") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1223
1224func (s *SpeechRecognitionResult) MarshalJSON() ([]byte, error) {
1225	type NoMethod SpeechRecognitionResult
1226	raw := NoMethod(*s)
1227	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1228}
1229
// Status: The `Status` type defines a logical error model that is
// suitable for different programming environments, including REST APIs
// and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each
// `Status` message contains three pieces of data: error code, error
// message, and error details.
//
// You can find out more about this error model and how to work with it
// in the [API Design Guide](https://cloud.google.com/apis/design/errors).
type Status struct {
	// Code: The status code, which should be an enum value of
	// google.rpc.Code.
	Code int64 `json:"code,omitempty"`

	// Details: A list of messages that carry the error details. There is a
	// common set of message types for APIs to use.
	// Kept as raw JSON so callers can decode each detail into its
	// concrete type on demand.
	Details []googleapi.RawMessage `json:"details,omitempty"`

	// Message: A developer-facing error message, which should be in
	// English. Any user-facing error message should be localized and sent
	// in the google.rpc.Status.details field, or localized by the client.
	Message string `json:"message,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Code") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Code") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1275
1276func (s *Status) MarshalJSON() ([]byte, error) {
1277	type NoMethod Status
1278	raw := NoMethod(*s)
1279	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1280}
1281
// WordInfo: Word-specific information for recognized words.
type WordInfo struct {
	// Confidence: The confidence estimate between 0.0 and 1.0. A higher
	// number indicates an estimated greater likelihood that the recognized
	// words are correct. This field is set only for the top alternative of
	// a non-streaming result or, of a streaming result where
	// `is_final=true`. This field is not guaranteed to be accurate and
	// users should not rely on it to be always provided. The default of
	// 0.0 is a sentinel value indicating `confidence` was not set.
	Confidence float64 `json:"confidence,omitempty"`

	// EndTime: Time offset relative to the beginning of the audio, and
	// corresponding to the end of the spoken word. This field is only set
	// if `enable_word_time_offsets=true` and only in the top hypothesis.
	// This is an experimental feature and the accuracy of the time offset
	// can vary.
	EndTime string `json:"endTime,omitempty"`

	// SpeakerTag: Output only. A distinct integer value is assigned for
	// every speaker within the audio. This field specifies which one of
	// those speakers was detected to have spoken this word. Value ranges
	// from '1' to diarization_speaker_count. speaker_tag is set if
	// enable_speaker_diarization = 'true' and only in the top alternative.
	SpeakerTag int64 `json:"speakerTag,omitempty"`

	// StartTime: Time offset relative to the beginning of the audio, and
	// corresponding to the start of the spoken word. This field is only
	// set if `enable_word_time_offsets=true` and only in the top
	// hypothesis. This is an experimental feature and the accuracy of the
	// time offset can vary.
	StartTime string `json:"startTime,omitempty"`

	// Word: The word corresponding to this set of information.
	Word string `json:"word,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Confidence") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Confidence") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1346
1347func (s *WordInfo) MarshalJSON() ([]byte, error) {
1348	type NoMethod WordInfo
1349	raw := NoMethod(*s)
1350	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1351}
1352
1353func (s *WordInfo) UnmarshalJSON(data []byte) error {
1354	type NoMethod WordInfo
1355	var s1 struct {
1356		Confidence gensupport.JSONFloat64 `json:"confidence"`
1357		*NoMethod
1358	}
1359	s1.NoMethod = (*NoMethod)(s)
1360	if err := json.Unmarshal(data, &s1); err != nil {
1361		return err
1362	}
1363	s.Confidence = float64(s1.Confidence)
1364	return nil
1365}
1366
1367// method id "speech.operations.get":
1368
// OperationsGetCall builds and executes a speech.operations.get request.
type OperationsGetCall struct {
	s            *Service             // parent service; supplies BasePath, client and user agent
	name         string               // operation resource name, expanded into the URL path
	urlParams_   gensupport.URLParams // accumulated query parameters
	ifNoneMatch_ string               // optional ETag sent as If-None-Match
	ctx_         context.Context      // optional context controlling the HTTP request
	header_      http.Header          // extra headers, lazily created by Header()
}
1377
1378// Get: Gets the latest state of a long-running operation.  Clients can
1379// use this
1380// method to poll the operation result at intervals as recommended by
1381// the API
1382// service.
1383func (r *OperationsService) Get(name string) *OperationsGetCall {
1384	c := &OperationsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1385	c.name = name
1386	return c
1387}
1388
1389// Fields allows partial responses to be retrieved. See
1390// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
1391// for more information.
1392func (c *OperationsGetCall) Fields(s ...googleapi.Field) *OperationsGetCall {
1393	c.urlParams_.Set("fields", googleapi.CombineFields(s))
1394	return c
1395}
1396
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *OperationsGetCall) IfNoneMatch(entityTag string) *OperationsGetCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1406
1407// Context sets the context to be used in this call's Do method. Any
1408// pending HTTP request will be aborted if the provided context is
1409// canceled.
1410func (c *OperationsGetCall) Context(ctx context.Context) *OperationsGetCall {
1411	c.ctx_ = ctx
1412	return c
1413}
1414
1415// Header returns an http.Header that can be modified by the caller to
1416// add HTTP headers to the request.
1417func (c *OperationsGetCall) Header() http.Header {
1418	if c.header_ == nil {
1419		c.header_ = make(http.Header)
1420	}
1421	return c.header_
1422}
1423
1424func (c *OperationsGetCall) doRequest(alt string) (*http.Response, error) {
1425	reqHeaders := make(http.Header)
1426	reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20200302")
1427	for k, v := range c.header_ {
1428		reqHeaders[k] = v
1429	}
1430	reqHeaders.Set("User-Agent", c.s.userAgent())
1431	if c.ifNoneMatch_ != "" {
1432		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
1433	}
1434	var body io.Reader = nil
1435	c.urlParams_.Set("alt", alt)
1436	c.urlParams_.Set("prettyPrint", "false")
1437	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/operations/{+name}")
1438	urls += "?" + c.urlParams_.Encode()
1439	req, err := http.NewRequest("GET", urls, body)
1440	if err != nil {
1441		return nil, err
1442	}
1443	req.Header = reqHeaders
1444	googleapi.Expand(req.URL, map[string]string{
1445		"name": c.name,
1446	})
1447	return gensupport.SendRequest(c.ctx_, c.s.client, req)
1448}
1449
// Do executes the "speech.operations.get" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *OperationsGetCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 is handled before the generic error check so that callers
	// using IfNoneMatch receive a *googleapi.Error that
	// googleapi.IsNotModified recognizes.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	// Non-2xx responses are converted to an error here.
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate the result with the HTTP status and headers so the
	// caller can inspect them via Operation.ServerResponse.
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Gets the latest state of a long-running operation.  Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.",
	//   "flatPath": "v1p1beta1/operations/{operationsId}",
	//   "httpMethod": "GET",
	//   "id": "speech.operations.get",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "name": {
	//       "description": "The name of the operation resource.",
	//       "location": "path",
	//       "pattern": "^.*$",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/operations/{+name}",
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
1514
1515// method id "speech.operations.list":
1516
// OperationsListCall builds and executes a speech.operations.list request.
type OperationsListCall struct {
	s            *Service             // parent service; supplies BasePath, client and user agent
	urlParams_   gensupport.URLParams // accumulated query parameters (filter, name, pageSize, pageToken, ...)
	ifNoneMatch_ string               // optional ETag sent as If-None-Match
	ctx_         context.Context      // optional context controlling the HTTP request
	header_      http.Header          // extra headers, lazily created by Header()
}
1524
1525// List: Lists operations that match the specified filter in the
1526// request. If the
1527// server doesn't support this method, it returns
1528// `UNIMPLEMENTED`.
1529//
1530// NOTE: the `name` binding allows API services to override the
1531// binding
1532// to use different resource name schemes, such as `users/*/operations`.
1533// To
1534// override the binding, API services can add a binding such
1535// as
1536// "/v1/{name=users/*}/operations" to their service configuration.
1537// For backwards compatibility, the default name includes the
1538// operations
1539// collection id, however overriding users must ensure the name
1540// binding
1541// is the parent resource, without the operations collection id.
1542func (r *OperationsService) List() *OperationsListCall {
1543	c := &OperationsListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1544	return c
1545}
1546
1547// Filter sets the optional parameter "filter": The standard list
1548// filter.
1549func (c *OperationsListCall) Filter(filter string) *OperationsListCall {
1550	c.urlParams_.Set("filter", filter)
1551	return c
1552}
1553
// Name sets the optional parameter "name": The name of the operation's
// parent resource. The value is sent as the "name" URL query parameter.
func (c *OperationsListCall) Name(name string) *OperationsListCall {
	c.urlParams_.Set("name", name)
	return c
}
1560
1561// PageSize sets the optional parameter "pageSize": The standard list
1562// page size.
1563func (c *OperationsListCall) PageSize(pageSize int64) *OperationsListCall {
1564	c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
1565	return c
1566}
1567
// PageToken sets the optional parameter "pageToken": The standard list
// page token, typically the NextPageToken from a previous list response.
func (c *OperationsListCall) PageToken(pageToken string) *OperationsListCall {
	c.urlParams_.Set("pageToken", pageToken)
	return c
}
1574
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. The field list is combined into the single
// "fields" URL query parameter.
func (c *OperationsListCall) Fields(s ...googleapi.Field) *OperationsListCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
1582
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *OperationsListCall) IfNoneMatch(entityTag string) *OperationsListCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1592
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. The context is stored on the call and read when the request
// is sent.
func (c *OperationsListCall) Context(ctx context.Context) *OperationsListCall {
	c.ctx_ = ctx
	return c
}
1600
1601// Header returns an http.Header that can be modified by the caller to
1602// add HTTP headers to the request.
1603func (c *OperationsListCall) Header() http.Header {
1604	if c.header_ == nil {
1605		c.header_ = make(http.Header)
1606	}
1607	return c.header_
1608}
1609
// doRequest builds and sends the HTTP GET request for this call. alt
// selects the response wire format (Do passes "json").
func (c *OperationsListCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20200302")
	// Caller-supplied headers (see Header) override the default set above.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	// All URL parameters must be set before Encode is called below.
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/operations")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
1632
// Do executes the "speech.operations.list" call.
// Exactly one of *ListOperationsResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListOperationsResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *OperationsListCall) Do(opts ...googleapi.CallOption) (*ListOperationsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// 304 Not Modified is checked before err on purpose: it is surfaced
	// as a *googleapi.Error so callers can detect it with
	// googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	ret := &ListOperationsResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	// DecodeResponse receives &ret (a **ListOperationsResponse), per the
	// generator's convention.
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Lists operations that match the specified filter in the request. If the\nserver doesn't support this method, it returns `UNIMPLEMENTED`.\n\nNOTE: the `name` binding allows API services to override the binding\nto use different resource name schemes, such as `users/*/operations`. To\noverride the binding, API services can add a binding such as\n`\"/v1/{name=users/*}/operations\"` to their service configuration.\nFor backwards compatibility, the default name includes the operations\ncollection id, however overriding users must ensure the name binding\nis the parent resource, without the operations collection id.",
	//   "flatPath": "v1p1beta1/operations",
	//   "httpMethod": "GET",
	//   "id": "speech.operations.list",
	//   "parameterOrder": [],
	//   "parameters": {
	//     "filter": {
	//       "description": "The standard list filter.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "name": {
	//       "description": "The name of the operation's parent resource.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "pageSize": {
	//       "description": "The standard list page size.",
	//       "format": "int32",
	//       "location": "query",
	//       "type": "integer"
	//     },
	//     "pageToken": {
	//       "description": "The standard list page token.",
	//       "location": "query",
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/operations",
	//   "response": {
	//     "$ref": "ListOperationsResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
1709
1710// Pages invokes f for each page of results.
1711// A non-nil error returned from f will halt the iteration.
1712// The provided context supersedes any context provided to the Context method.
1713func (c *OperationsListCall) Pages(ctx context.Context, f func(*ListOperationsResponse) error) error {
1714	c.ctx_ = ctx
1715	defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
1716	for {
1717		x, err := c.Do()
1718		if err != nil {
1719			return err
1720		}
1721		if err := f(x); err != nil {
1722			return err
1723		}
1724		if x.NextPageToken == "" {
1725			return nil
1726		}
1727		c.PageToken(x.NextPageToken)
1728	}
1729}
1730
1731// method id "speech.projects.locations.operations.get":
1732
// ProjectsLocationsOperationsGetCall holds the in-progress state of a
// "speech.projects.locations.operations.get" request: the owning
// service, the operation resource name, the accumulated URL query
// parameters, an optional ETag for conditional requests, the call
// context, and any caller-supplied HTTP headers.
type ProjectsLocationsOperationsGetCall struct {
	s            *Service
	name         string
	urlParams_   gensupport.URLParams
	ifNoneMatch_ string
	ctx_         context.Context
	header_      http.Header
}
1741
1742// Get: Gets the latest state of a long-running operation.  Clients can
1743// use this
1744// method to poll the operation result at intervals as recommended by
1745// the API
1746// service.
1747func (r *ProjectsLocationsOperationsService) Get(name string) *ProjectsLocationsOperationsGetCall {
1748	c := &ProjectsLocationsOperationsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1749	c.name = name
1750	return c
1751}
1752
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. The field list is combined into the single
// "fields" URL query parameter.
func (c *ProjectsLocationsOperationsGetCall) Fields(s ...googleapi.Field) *ProjectsLocationsOperationsGetCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
1760
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsLocationsOperationsGetCall) IfNoneMatch(entityTag string) *ProjectsLocationsOperationsGetCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1770
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. The context is stored on the call and read when the request
// is sent.
func (c *ProjectsLocationsOperationsGetCall) Context(ctx context.Context) *ProjectsLocationsOperationsGetCall {
	c.ctx_ = ctx
	return c
}
1778
1779// Header returns an http.Header that can be modified by the caller to
1780// add HTTP headers to the request.
1781func (c *ProjectsLocationsOperationsGetCall) Header() http.Header {
1782	if c.header_ == nil {
1783		c.header_ = make(http.Header)
1784	}
1785	return c.header_
1786}
1787
// doRequest builds and sends the HTTP GET request for this call. alt
// selects the response wire format (Do passes "json").
func (c *ProjectsLocationsOperationsGetCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20200302")
	// Caller-supplied headers (see Header) override the default set above.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	// All URL parameters must be set before Encode is called below.
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/{+name}")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	// Expand substitutes the {+name} path template with c.name.
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
1813
// Do executes the "speech.projects.locations.operations.get" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsLocationsOperationsGetCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// 304 Not Modified is checked before err on purpose: it is surfaced
	// as a *googleapi.Error so callers can detect it with
	// googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	// DecodeResponse receives &ret (a **Operation), per the generator's
	// convention.
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Gets the latest state of a long-running operation.  Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.",
	//   "flatPath": "v1p1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}",
	//   "httpMethod": "GET",
	//   "id": "speech.projects.locations.operations.get",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "name": {
	//       "description": "The name of the operation resource.",
	//       "location": "path",
	//       "pattern": "^projects/[^/]+/locations/[^/]+/operations/[^/]+$",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/{+name}",
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
1878
1879// method id "speech.projects.locations.operations.list":
1880
// ProjectsLocationsOperationsListCall holds the in-progress state of a
// "speech.projects.locations.operations.list" request: the owning
// service, the parent resource name, the accumulated URL query
// parameters, an optional ETag for conditional requests, the call
// context, and any caller-supplied HTTP headers.
type ProjectsLocationsOperationsListCall struct {
	s            *Service
	name         string
	urlParams_   gensupport.URLParams
	ifNoneMatch_ string
	ctx_         context.Context
	header_      http.Header
}
1889
1890// List: Lists operations that match the specified filter in the
1891// request. If the
1892// server doesn't support this method, it returns
1893// `UNIMPLEMENTED`.
1894//
1895// NOTE: the `name` binding allows API services to override the
1896// binding
1897// to use different resource name schemes, such as `users/*/operations`.
1898// To
1899// override the binding, API services can add a binding such
1900// as
1901// "/v1/{name=users/*}/operations" to their service configuration.
1902// For backwards compatibility, the default name includes the
1903// operations
1904// collection id, however overriding users must ensure the name
1905// binding
1906// is the parent resource, without the operations collection id.
1907func (r *ProjectsLocationsOperationsService) List(name string) *ProjectsLocationsOperationsListCall {
1908	c := &ProjectsLocationsOperationsListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1909	c.name = name
1910	return c
1911}
1912
// Filter sets the optional parameter "filter": The standard list filter.
// The value is sent as the "filter" URL query parameter.
func (c *ProjectsLocationsOperationsListCall) Filter(filter string) *ProjectsLocationsOperationsListCall {
	c.urlParams_.Set("filter", filter)
	return c
}
1919
1920// PageSize sets the optional parameter "pageSize": The standard list
1921// page size.
1922func (c *ProjectsLocationsOperationsListCall) PageSize(pageSize int64) *ProjectsLocationsOperationsListCall {
1923	c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
1924	return c
1925}
1926
// PageToken sets the optional parameter "pageToken": The standard list
// page token, typically the NextPageToken from a previous list response.
func (c *ProjectsLocationsOperationsListCall) PageToken(pageToken string) *ProjectsLocationsOperationsListCall {
	c.urlParams_.Set("pageToken", pageToken)
	return c
}
1933
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. The field list is combined into the single
// "fields" URL query parameter.
func (c *ProjectsLocationsOperationsListCall) Fields(s ...googleapi.Field) *ProjectsLocationsOperationsListCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
1941
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsLocationsOperationsListCall) IfNoneMatch(entityTag string) *ProjectsLocationsOperationsListCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1951
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. The context is stored on the call and read when the request
// is sent.
func (c *ProjectsLocationsOperationsListCall) Context(ctx context.Context) *ProjectsLocationsOperationsListCall {
	c.ctx_ = ctx
	return c
}
1959
1960// Header returns an http.Header that can be modified by the caller to
1961// add HTTP headers to the request.
1962func (c *ProjectsLocationsOperationsListCall) Header() http.Header {
1963	if c.header_ == nil {
1964		c.header_ = make(http.Header)
1965	}
1966	return c.header_
1967}
1968
// doRequest builds and sends the HTTP GET request for this call. alt
// selects the response wire format (Do passes "json").
func (c *ProjectsLocationsOperationsListCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20200302")
	// Caller-supplied headers (see Header) override the default set above.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	// All URL parameters must be set before Encode is called below.
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/{+name}/operations")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	// Expand substitutes the {+name} path template with c.name.
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
1994
// Do executes the "speech.projects.locations.operations.list" call.
// Exactly one of *ListOperationsResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListOperationsResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsLocationsOperationsListCall) Do(opts ...googleapi.CallOption) (*ListOperationsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// 304 Not Modified is checked before err on purpose: it is surfaced
	// as a *googleapi.Error so callers can detect it with
	// googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	ret := &ListOperationsResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	// DecodeResponse receives &ret (a **ListOperationsResponse), per the
	// generator's convention.
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Lists operations that match the specified filter in the request. If the\nserver doesn't support this method, it returns `UNIMPLEMENTED`.\n\nNOTE: the `name` binding allows API services to override the binding\nto use different resource name schemes, such as `users/*/operations`. To\noverride the binding, API services can add a binding such as\n`\"/v1/{name=users/*}/operations\"` to their service configuration.\nFor backwards compatibility, the default name includes the operations\ncollection id, however overriding users must ensure the name binding\nis the parent resource, without the operations collection id.",
	//   "flatPath": "v1p1beta1/projects/{projectsId}/locations/{locationsId}/operations",
	//   "httpMethod": "GET",
	//   "id": "speech.projects.locations.operations.list",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "filter": {
	//       "description": "The standard list filter.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "name": {
	//       "description": "The name of the operation's parent resource.",
	//       "location": "path",
	//       "pattern": "^projects/[^/]+/locations/[^/]+$",
	//       "required": true,
	//       "type": "string"
	//     },
	//     "pageSize": {
	//       "description": "The standard list page size.",
	//       "format": "int32",
	//       "location": "query",
	//       "type": "integer"
	//     },
	//     "pageToken": {
	//       "description": "The standard list page token.",
	//       "location": "query",
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/{+name}/operations",
	//   "response": {
	//     "$ref": "ListOperationsResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
2075
2076// Pages invokes f for each page of results.
2077// A non-nil error returned from f will halt the iteration.
2078// The provided context supersedes any context provided to the Context method.
2079func (c *ProjectsLocationsOperationsListCall) Pages(ctx context.Context, f func(*ListOperationsResponse) error) error {
2080	c.ctx_ = ctx
2081	defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
2082	for {
2083		x, err := c.Do()
2084		if err != nil {
2085			return err
2086		}
2087		if err := f(x); err != nil {
2088			return err
2089		}
2090		if x.NextPageToken == "" {
2091			return nil
2092		}
2093		c.PageToken(x.NextPageToken)
2094	}
2095}
2096
2097// method id "speech.speech.longrunningrecognize":
2098
// SpeechLongrunningrecognizeCall holds the in-progress state of a
// "speech.speech.longrunningrecognize" request: the owning service, the
// request body to POST, the accumulated URL query parameters, the call
// context, and any caller-supplied HTTP headers.
type SpeechLongrunningrecognizeCall struct {
	s                           *Service
	longrunningrecognizerequest *LongRunningRecognizeRequest
	urlParams_                  gensupport.URLParams
	ctx_                        context.Context
	header_                     http.Header
}
2106
2107// Longrunningrecognize: Performs asynchronous speech recognition:
2108// receive results via the
2109// google.longrunning.Operations interface. Returns either
2110// an
2111// `Operation.error` or an `Operation.response` which contains
2112// a `LongRunningRecognizeResponse` message.
2113// For more information on asynchronous speech recognition, see
2114// the
2115// [how-to](https://cloud.google.com/speech-to-text/docs/async-recogn
2116// ize).
2117func (r *SpeechService) Longrunningrecognize(longrunningrecognizerequest *LongRunningRecognizeRequest) *SpeechLongrunningrecognizeCall {
2118	c := &SpeechLongrunningrecognizeCall{s: r.s, urlParams_: make(gensupport.URLParams)}
2119	c.longrunningrecognizerequest = longrunningrecognizerequest
2120	return c
2121}
2122
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. The field list is combined into the single
// "fields" URL query parameter.
func (c *SpeechLongrunningrecognizeCall) Fields(s ...googleapi.Field) *SpeechLongrunningrecognizeCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
2130
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. The context is stored on the call and read when the request
// is sent.
func (c *SpeechLongrunningrecognizeCall) Context(ctx context.Context) *SpeechLongrunningrecognizeCall {
	c.ctx_ = ctx
	return c
}
2138
2139// Header returns an http.Header that can be modified by the caller to
2140// add HTTP headers to the request.
2141func (c *SpeechLongrunningrecognizeCall) Header() http.Header {
2142	if c.header_ == nil {
2143		c.header_ = make(http.Header)
2144	}
2145	return c.header_
2146}
2147
// doRequest marshals the request body to JSON and sends the HTTP POST
// for this call. alt selects the response wire format (Do passes "json").
func (c *SpeechLongrunningrecognizeCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20200302")
	// Caller-supplied headers (see Header) override the default set above.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	body, err := googleapi.WithoutDataWrapper.JSONReader(c.longrunningrecognizerequest)
	if err != nil {
		return nil, err
	}
	reqHeaders.Set("Content-Type", "application/json")
	// All URL parameters must be set before Encode is called below.
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/speech:longrunningrecognize")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("POST", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
2172
// Do executes the "speech.speech.longrunningrecognize" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *SpeechLongrunningrecognizeCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// 304 Not Modified is checked before err on purpose: it is surfaced
	// as a *googleapi.Error so callers can detect it with
	// googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	// DecodeResponse receives &ret (a **Operation), per the generator's
	// convention.
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Performs asynchronous speech recognition: receive results via the\ngoogle.longrunning.Operations interface. Returns either an\n`Operation.error` or an `Operation.response` which contains\na `LongRunningRecognizeResponse` message.\nFor more information on asynchronous speech recognition, see the\n[how-to](https://cloud.google.com/speech-to-text/docs/async-recognize).",
	//   "flatPath": "v1p1beta1/speech:longrunningrecognize",
	//   "httpMethod": "POST",
	//   "id": "speech.speech.longrunningrecognize",
	//   "parameterOrder": [],
	//   "parameters": {},
	//   "path": "v1p1beta1/speech:longrunningrecognize",
	//   "request": {
	//     "$ref": "LongRunningRecognizeRequest"
	//   },
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
2230
2231// method id "speech.speech.recognize":
2232
// SpeechRecognizeCall holds the in-progress state of a
// "speech.speech.recognize" request: the owning service, the request
// body to POST, the accumulated URL query parameters, the call context,
// and any caller-supplied HTTP headers.
type SpeechRecognizeCall struct {
	s                *Service
	recognizerequest *RecognizeRequest
	urlParams_       gensupport.URLParams
	ctx_             context.Context
	header_          http.Header
}
2240
2241// Recognize: Performs synchronous speech recognition: receive results
2242// after all audio
2243// has been sent and processed.
2244func (r *SpeechService) Recognize(recognizerequest *RecognizeRequest) *SpeechRecognizeCall {
2245	c := &SpeechRecognizeCall{s: r.s, urlParams_: make(gensupport.URLParams)}
2246	c.recognizerequest = recognizerequest
2247	return c
2248}
2249
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. The field list is combined into the single
// "fields" URL query parameter.
func (c *SpeechRecognizeCall) Fields(s ...googleapi.Field) *SpeechRecognizeCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
2257
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. The context is stored on the call and read when the request
// is sent.
func (c *SpeechRecognizeCall) Context(ctx context.Context) *SpeechRecognizeCall {
	c.ctx_ = ctx
	return c
}
2265
2266// Header returns an http.Header that can be modified by the caller to
2267// add HTTP headers to the request.
2268func (c *SpeechRecognizeCall) Header() http.Header {
2269	if c.header_ == nil {
2270		c.header_ = make(http.Header)
2271	}
2272	return c.header_
2273}
2274
// doRequest marshals the request body to JSON and sends the HTTP POST
// for this call. alt selects the response wire format (Do passes "json").
func (c *SpeechRecognizeCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20200302")
	// Caller-supplied headers (see Header) override the default set above.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	body, err := googleapi.WithoutDataWrapper.JSONReader(c.recognizerequest)
	if err != nil {
		return nil, err
	}
	reqHeaders.Set("Content-Type", "application/json")
	// All URL parameters must be set before Encode is called below.
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/speech:recognize")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("POST", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
2299
// Do executes the "speech.speech.recognize" call.
// Exactly one of *RecognizeResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *RecognizeResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *SpeechRecognizeCall) Do(opts ...googleapi.CallOption) (*RecognizeResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// 304 Not Modified is checked before err on purpose: it is surfaced
	// as a *googleapi.Error so callers can detect it with
	// googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	ret := &RecognizeResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	// DecodeResponse receives &ret (a **RecognizeResponse), per the
	// generator's convention.
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Performs synchronous speech recognition: receive results after all audio\nhas been sent and processed.",
	//   "flatPath": "v1p1beta1/speech:recognize",
	//   "httpMethod": "POST",
	//   "id": "speech.speech.recognize",
	//   "parameterOrder": [],
	//   "parameters": {},
	//   "path": "v1p1beta1/speech:recognize",
	//   "request": {
	//     "$ref": "RecognizeRequest"
	//   },
	//   "response": {
	//     "$ref": "RecognizeResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
2357