1// Copyright 2019 Google LLC.
2// Use of this source code is governed by a BSD-style
3// license that can be found in the LICENSE file.
4
5// Code generated file. DO NOT EDIT.
6
7// Package speech provides access to the Cloud Speech-to-Text API.
8//
9// This package is DEPRECATED. Use package cloud.google.com/go/speech/apiv1 instead.
10//
11// For product documentation, see: https://cloud.google.com/speech-to-text/docs/quickstart-protocol
12//
13// Creating a client
14//
15// Usage example:
16//
17//   import "google.golang.org/api/speech/v1p1beta1"
18//   ...
19//   ctx := context.Background()
20//   speechService, err := speech.NewService(ctx)
21//
22// In this example, Google Application Default Credentials are used for authentication.
23//
24// For information on how to create and obtain Application Default Credentials, see https://developers.google.com/identity/protocols/application-default-credentials.
25//
26// Other authentication options
27//
28// To use an API key for authentication (note: some APIs do not support API keys), use option.WithAPIKey:
29//
30//   speechService, err := speech.NewService(ctx, option.WithAPIKey("AIza..."))
31//
32// To use an OAuth token (e.g., a user token obtained via a three-legged OAuth flow), use option.WithTokenSource:
33//
34//   config := &oauth2.Config{...}
35//   // ...
36//   token, err := config.Exchange(ctx, ...)
37//   speechService, err := speech.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token)))
38//
39// See https://godoc.org/google.golang.org/api/option/ for details on options.
40package speech // import "google.golang.org/api/speech/v1p1beta1"
41
42import (
43	"bytes"
44	"context"
45	"encoding/json"
46	"errors"
47	"fmt"
48	"io"
49	"net/http"
50	"net/url"
51	"strconv"
52	"strings"
53
54	gensupport "google.golang.org/api/gensupport"
55	googleapi "google.golang.org/api/googleapi"
56	option "google.golang.org/api/option"
57	htransport "google.golang.org/api/transport/http"
58)
59
// Always reference these packages, just in case the auto-generated code
// below doesn't. These blank assignments keep every import in use so the
// file compiles regardless of which helpers the generator emits.
var _ = bytes.NewBuffer
var _ = strconv.Itoa
var _ = fmt.Sprintf
var _ = json.NewDecoder
var _ = io.Copy
var _ = url.Parse
var _ = gensupport.MarshalJSON
var _ = googleapi.Version
var _ = errors.New
var _ = strings.Replace
var _ = context.Canceled
73
// API identity and endpoint constants for the Cloud Speech-to-Text API.
const apiId = "speech:v1p1beta1"
const apiName = "speech"
const apiVersion = "v1p1beta1"
const basePath = "https://speech.googleapis.com/"

// OAuth2 scopes used by this API.
const (
	// View and manage your data across Google Cloud Platform services
	CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
)
84
85// NewService creates a new Service.
86func NewService(ctx context.Context, opts ...option.ClientOption) (*Service, error) {
87	scopesOption := option.WithScopes(
88		"https://www.googleapis.com/auth/cloud-platform",
89	)
90	// NOTE: prepend, so we don't override user-specified scopes.
91	opts = append([]option.ClientOption{scopesOption}, opts...)
92	client, endpoint, err := htransport.NewClient(ctx, opts...)
93	if err != nil {
94		return nil, err
95	}
96	s, err := New(client)
97	if err != nil {
98		return nil, err
99	}
100	if endpoint != "" {
101		s.BasePath = endpoint
102	}
103	return s, nil
104}
105
106// New creates a new Service. It uses the provided http.Client for requests.
107//
108// Deprecated: please use NewService instead.
109// To provide a custom HTTP client, use option.WithHTTPClient.
110// If you are using google.golang.org/api/googleapis/transport.APIKey, use option.WithAPIKey with NewService instead.
111func New(client *http.Client) (*Service, error) {
112	if client == nil {
113		return nil, errors.New("client is nil")
114	}
115	s := &Service{client: client, BasePath: basePath}
116	s.Operations = NewOperationsService(s)
117	s.Projects = NewProjectsService(s)
118	s.Speech = NewSpeechService(s)
119	return s, nil
120}
121
// Service is the root client for the Cloud Speech-to-Text API. It holds
// the underlying HTTP client plus one field per top-level API collection.
type Service struct {
	client    *http.Client // HTTP client used to issue all API requests
	BasePath  string // API endpoint base URL
	UserAgent string // optional additional User-Agent fragment

	Operations *OperationsService

	Projects *ProjectsService

	Speech *SpeechService
}
133
134func (s *Service) userAgent() string {
135	if s.UserAgent == "" {
136		return googleapi.UserAgent
137	}
138	return googleapi.UserAgent + " " + s.UserAgent
139}
140
141func NewOperationsService(s *Service) *OperationsService {
142	rs := &OperationsService{s: s}
143	return rs
144}
145
// OperationsService provides access to the top-level operations
// collection of the API.
type OperationsService struct {
	s *Service // parent service, used for transport and base URL
}
149
150func NewProjectsService(s *Service) *ProjectsService {
151	rs := &ProjectsService{s: s}
152	rs.Locations = NewProjectsLocationsService(s)
153	rs.Operations = NewProjectsOperationsService(s)
154	return rs
155}
156
// ProjectsService provides access to project-scoped resources and their
// nested sub-collections.
type ProjectsService struct {
	s *Service // parent service, used for transport and base URL

	Locations *ProjectsLocationsService

	Operations *ProjectsOperationsService
}
164
165func NewProjectsLocationsService(s *Service) *ProjectsLocationsService {
166	rs := &ProjectsLocationsService{s: s}
167	rs.Operations = NewProjectsLocationsOperationsService(s)
168	return rs
169}
170
// ProjectsLocationsService provides access to location-scoped resources
// under a project.
type ProjectsLocationsService struct {
	s *Service // parent service, used for transport and base URL

	Operations *ProjectsLocationsOperationsService
}
176
177func NewProjectsLocationsOperationsService(s *Service) *ProjectsLocationsOperationsService {
178	rs := &ProjectsLocationsOperationsService{s: s}
179	return rs
180}
181
// ProjectsLocationsOperationsService provides access to long-running
// operations scoped to a project location.
type ProjectsLocationsOperationsService struct {
	s *Service // parent service, used for transport and base URL
}
185
186func NewProjectsOperationsService(s *Service) *ProjectsOperationsService {
187	rs := &ProjectsOperationsService{s: s}
188	rs.ManualRecognitionTasks = NewProjectsOperationsManualRecognitionTasksService(s)
189	return rs
190}
191
// ProjectsOperationsService provides access to long-running operations
// scoped to a project.
type ProjectsOperationsService struct {
	s *Service // parent service, used for transport and base URL

	ManualRecognitionTasks *ProjectsOperationsManualRecognitionTasksService
}
197
198func NewProjectsOperationsManualRecognitionTasksService(s *Service) *ProjectsOperationsManualRecognitionTasksService {
199	rs := &ProjectsOperationsManualRecognitionTasksService{s: s}
200	return rs
201}
202
// ProjectsOperationsManualRecognitionTasksService provides access to
// manual recognition task operations under a project.
type ProjectsOperationsManualRecognitionTasksService struct {
	s *Service // parent service, used for transport and base URL
}
206
207func NewSpeechService(s *Service) *SpeechService {
208	rs := &SpeechService{s: s}
209	return rs
210}
211
// SpeechService provides access to the speech recognition methods of the
// API (recognize, longrunningrecognize).
type SpeechService struct {
	s *Service // parent service, used for transport and base URL
}
215
// ListOperationsResponse: The response message for
// Operations.ListOperations.
type ListOperationsResponse struct {
	// NextPageToken: The standard List next-page token.
	NextPageToken string `json:"nextPageToken,omitempty"`

	// Operations: A list of operations that matches the specified filter
	// in the request.
	Operations []*Operation `json:"operations,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "NextPageToken") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "NextPageToken") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
246
247func (s *ListOperationsResponse) MarshalJSON() ([]byte, error) {
248	type NoMethod ListOperationsResponse
249	raw := NoMethod(*s)
250	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
251}
252
// LongRunningRecognizeMetadata: Describes the progress of a long-running
// `LongRunningRecognize` call. It is included in the `metadata` field of
// the `Operation` returned by the `GetOperation` call of the
// `google::longrunning::Operations` service.
type LongRunningRecognizeMetadata struct {
	// LastUpdateTime: Time of the most recent processing update.
	LastUpdateTime string `json:"lastUpdateTime,omitempty"`

	// ProgressPercent: Approximate percentage of audio processed thus far.
	// Guaranteed to be 100 when the audio is fully processed and the
	// results are available.
	ProgressPercent int64 `json:"progressPercent,omitempty"`

	// StartTime: Time when the request was received.
	StartTime string `json:"startTime,omitempty"`

	// ForceSendFields is a list of field names (e.g. "LastUpdateTime") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "LastUpdateTime") to
	// include in API requests with the JSON null value. By default, fields
	// with empty values are omitted from API requests. However, any field
	// with an empty value appearing in NullFields will be sent to the
	// server as null. It is an error if a field in this list has a
	// non-empty value. This may be used to include null fields in Patch
	// requests.
	NullFields []string `json:"-"`
}
287
288func (s *LongRunningRecognizeMetadata) MarshalJSON() ([]byte, error) {
289	type NoMethod LongRunningRecognizeMetadata
290	raw := NoMethod(*s)
291	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
292}
293
// LongRunningRecognizeRequest: The top-level message sent by the client
// for the `LongRunningRecognize` method.
type LongRunningRecognizeRequest struct {
	// Audio: *Required* The audio data to be recognized.
	Audio *RecognitionAudio `json:"audio,omitempty"`

	// Config: *Required* Provides information to the recognizer that
	// specifies how to process the request.
	Config *RecognitionConfig `json:"config,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Audio") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Audio") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
322
323func (s *LongRunningRecognizeRequest) MarshalJSON() ([]byte, error) {
324	type NoMethod LongRunningRecognizeRequest
325	raw := NoMethod(*s)
326	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
327}
328
// LongRunningRecognizeResponse: The only message returned to the client
// by the `LongRunningRecognize` method. It contains the result as zero or
// more sequential `SpeechRecognitionResult` messages. It is included in
// the `result.response` field of the `Operation` returned by the
// `GetOperation` call of the `google::longrunning::Operations` service.
type LongRunningRecognizeResponse struct {
	// Results: Output only. Sequential list of transcription results
	// corresponding to sequential portions of audio.
	Results []*SpeechRecognitionResult `json:"results,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Results") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Results") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
360
361func (s *LongRunningRecognizeResponse) MarshalJSON() ([]byte, error) {
362	type NoMethod LongRunningRecognizeResponse
363	raw := NoMethod(*s)
364	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
365}
366
// Operation: This resource represents a long-running operation that is
// the result of a network API call.
type Operation struct {
	// Done: If the value is `false`, it means the operation is still in
	// progress. If `true`, the operation is completed, and either `error`
	// or `response` is available.
	Done bool `json:"done,omitempty"`

	// Error: The error result of the operation in case of failure or
	// cancellation.
	Error *Status `json:"error,omitempty"`

	// Metadata: Service-specific metadata associated with the operation.
	// It typically contains progress information and common metadata such
	// as create time. Some services might not provide such metadata. Any
	// method that returns a long-running operation should document the
	// metadata type, if any.
	Metadata googleapi.RawMessage `json:"metadata,omitempty"`

	// Name: The server-assigned name, which is only unique within the same
	// service that originally returns it. If you use the default HTTP
	// mapping, the `name` should be a resource name ending with
	// `operations/{unique_id}`.
	Name string `json:"name,omitempty"`

	// Response: The normal response of the operation in case of success.
	// If the original method returns no data on success, such as `Delete`,
	// the response is `google.protobuf.Empty`. If the original method is
	// standard `Get`/`Create`/`Update`, the response should be the
	// resource. For other methods, the response should have the type
	// `XxxResponse`, where `Xxx` is the original method name. For example,
	// if the original method name is `TakeSnapshot()`, the inferred
	// response type is `TakeSnapshotResponse`.
	Response googleapi.RawMessage `json:"response,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Done") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Done") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
436
437func (s *Operation) MarshalJSON() ([]byte, error) {
438	type NoMethod Operation
439	raw := NoMethod(*s)
440	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
441}
442
// RecognitionAudio: Contains audio data in the encoding specified in the
// `RecognitionConfig`. Either `content` or `uri` must be supplied.
// Supplying both or neither returns google.rpc.Code.INVALID_ARGUMENT.
// See [content limits](/speech-to-text/quotas#content).
type RecognitionAudio struct {
	// Content: The audio data bytes encoded as specified in
	// `RecognitionConfig`. Note: as with all bytes fields, proto buffers
	// use a pure binary representation, whereas JSON representations use
	// base64.
	Content string `json:"content,omitempty"`

	// Uri: URI that points to a file that contains audio data bytes as
	// specified in `RecognitionConfig`. The file must not be compressed
	// (for example, gzip). Currently, only Google Cloud Storage URIs are
	// supported, which must be specified in the following format:
	// `gs://bucket_name/object_name` (other URI formats return
	// google.rpc.Code.INVALID_ARGUMENT). For more information, see
	// [Request URIs](https://cloud.google.com/storage/docs/reference-uris).
	Uri string `json:"uri,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Content") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Content") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
486
487func (s *RecognitionAudio) MarshalJSON() ([]byte, error) {
488	type NoMethod RecognitionAudio
489	raw := NoMethod(*s)
490	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
491}
492
// RecognitionConfig: Provides information to the recognizer that
// specifies how to process the request.
type RecognitionConfig struct {
	// AlternativeLanguageCodes: *Optional* A list of up to 3 additional
	// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language
	// tags, listing possible alternative languages of the supplied audio.
	// See [Language Support](/speech-to-text/docs/languages) for a list of
	// the currently supported language codes. If alternative languages are
	// listed, recognition result will contain recognition in the most
	// likely language detected including the main language_code. The
	// recognition result will include the language tag of the language
	// detected in the audio. Note: This feature is only supported for
	// Voice Command and Voice Search use cases and performance may vary
	// for other use cases (e.g., phone call transcription).
	AlternativeLanguageCodes []string `json:"alternativeLanguageCodes,omitempty"`

	// AudioChannelCount: *Optional* The number of channels in the input
	// audio data. ONLY set this for MULTI-CHANNEL recognition. Valid
	// values for LINEAR16 and FLAC are `1`-`8`. Valid values for OGG_OPUS
	// are '1'-'254'. Valid value for MULAW, AMR, AMR_WB and
	// SPEEX_WITH_HEADER_BYTE is only `1`. If `0` or omitted, defaults to
	// one channel (mono). Note: We only recognize the first channel by
	// default. To perform independent recognition on each channel set
	// `enable_separate_recognition_per_channel` to 'true'.
	AudioChannelCount int64 `json:"audioChannelCount,omitempty"`

	// DiarizationConfig: *Optional* Config to enable speaker diarization
	// and set additional parameters to make diarization better suited for
	// your application. Note: When this is enabled, we send all the words
	// from the beginning of the audio for the top alternative in every
	// consecutive STREAMING responses. This is done in order to improve
	// our speaker tags as our models learn to identify the speakers in the
	// conversation over time. For non-streaming requests, the diarization
	// results will be provided only in the top alternative of the FINAL
	// SpeechRecognitionResult.
	DiarizationConfig *SpeakerDiarizationConfig `json:"diarizationConfig,omitempty"`

	// DiarizationSpeakerCount: *Optional* If set, specifies the estimated
	// number of speakers in the conversation. If not set, defaults to '2'.
	// Ignored unless enable_speaker_diarization is set to true. Note: Use
	// diarization_config instead. This field will be DEPRECATED soon.
	DiarizationSpeakerCount int64 `json:"diarizationSpeakerCount,omitempty"`

	// EnableAutomaticPunctuation: *Optional* If 'true', adds punctuation
	// to recognition result hypotheses. This feature is only available in
	// select languages. Setting this for requests in other languages has
	// no effect at all. The default 'false' value does not add punctuation
	// to result hypotheses. Note: This is currently offered as an
	// experimental service, complimentary to all users. In the future this
	// may be exclusively available as a premium feature.
	EnableAutomaticPunctuation bool `json:"enableAutomaticPunctuation,omitempty"`

	// EnableSeparateRecognitionPerChannel: This needs to be set to `true`
	// explicitly and `audio_channel_count` > 1 to get each channel
	// recognized separately. The recognition result will contain a
	// `channel_tag` field to state which channel that result belongs to.
	// If this is not true, we will only recognize the first channel. The
	// request is billed cumulatively for all channels recognized:
	// `audio_channel_count` multiplied by the length of the audio.
	EnableSeparateRecognitionPerChannel bool `json:"enableSeparateRecognitionPerChannel,omitempty"`

	// EnableSpeakerDiarization: *Optional* If 'true', enables speaker
	// detection for each recognized word in the top alternative of the
	// recognition result using a speaker_tag provided in the WordInfo.
	// Note: Use diarization_config instead. This field will be DEPRECATED
	// soon.
	EnableSpeakerDiarization bool `json:"enableSpeakerDiarization,omitempty"`

	// EnableWordConfidence: *Optional* If `true`, the top result includes
	// a list of words and the confidence for those words. If `false`, no
	// word-level confidence information is returned. The default is
	// `false`.
	EnableWordConfidence bool `json:"enableWordConfidence,omitempty"`

	// EnableWordTimeOffsets: *Optional* If `true`, the top result includes
	// a list of words and the start and end time offsets (timestamps) for
	// those words. If `false`, no word-level time offset information is
	// returned. The default is `false`.
	EnableWordTimeOffsets bool `json:"enableWordTimeOffsets,omitempty"`

	// Encoding: Encoding of audio data sent in all `RecognitionAudio`
	// messages. This field is optional for `FLAC` and `WAV` audio files
	// and required for all other audio formats. For details, see
	// AudioEncoding.
	//
	// Possible values:
	//   "ENCODING_UNSPECIFIED" - Not specified.
	//   "LINEAR16" - Uncompressed 16-bit signed little-endian samples
	// (Linear PCM).
	//   "FLAC" - `FLAC` (Free Lossless Audio Codec) is the recommended
	// encoding because it is lossless--therefore recognition is not
	// compromised--and requires only about half the bandwidth of
	// `LINEAR16`. `FLAC` stream encoding supports 16-bit and 24-bit
	// samples, however, not all fields in `STREAMINFO` are supported.
	//   "MULAW" - 8-bit samples that compand 14-bit audio samples using
	// G.711 PCMU/mu-law.
	//   "AMR" - Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz`
	// must be 8000.
	//   "AMR_WB" - Adaptive Multi-Rate Wideband codec. `sample_rate_hertz`
	// must be 16000.
	//   "OGG_OPUS" - Opus encoded audio frames in Ogg container
	// ([OggOpus](https://wiki.xiph.org/OggOpus)). `sample_rate_hertz`
	// must be one of 8000, 12000, 16000, 24000, or 48000.
	//   "SPEEX_WITH_HEADER_BYTE" - Although the use of lossy encodings is
	// not recommended, if a very low bitrate encoding is required,
	// `OGG_OPUS` is highly preferred over Speex encoding. The
	// [Speex](https://speex.org/) encoding supported by Cloud Speech API
	// has a header byte in each block, as in MIME type
	// `audio/x-speex-with-header-byte`. It is a variant of the RTP Speex
	// encoding defined in [RFC 5574](https://tools.ietf.org/html/rfc5574).
	// The stream is a sequence of blocks, one block per RTP packet. Each
	// block starts with a byte containing the length of the block, in
	// bytes, followed by one or more frames of Speex data, padded to an
	// integral number of bytes (octets) as specified in RFC 5574. In
	// other words, each RTP header is replaced with a single byte
	// containing the block length. Only Speex wideband is supported.
	// `sample_rate_hertz` must be 16000.
	//   "MP3" - MP3 audio. Support all standard MP3 bitrates (which range
	// from 32-320 kbps). When using this encoding, `sample_rate_hertz`
	// can be optionally unset if not known.
	Encoding string `json:"encoding,omitempty"`

	// LanguageCode: *Required* The language of the supplied audio as a
	// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
	// Example: "en-US". See
	// [Language Support](/speech-to-text/docs/languages) for a list of the
	// currently supported language codes.
	LanguageCode string `json:"languageCode,omitempty"`

	// MaxAlternatives: *Optional* Maximum number of recognition hypotheses
	// to be returned. Specifically, the maximum number of
	// `SpeechRecognitionAlternative` messages within each
	// `SpeechRecognitionResult`. The server may return fewer than
	// `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1`
	// will return a maximum of one. If omitted, will return a maximum of
	// one.
	MaxAlternatives int64 `json:"maxAlternatives,omitempty"`

	// Metadata: *Optional* Metadata regarding this request.
	Metadata *RecognitionMetadata `json:"metadata,omitempty"`

	// Model: *Optional* Which model to select for the given request.
	// Select the model best suited to your domain to get best results. If
	// a model is not explicitly specified, then we auto-select a model
	// based on the parameters in the RecognitionConfig.
	// <table>
	//   <tr>
	//     <td><b>Model</b></td>
	//     <td><b>Description</b></td>
	//   </tr>
	//   <tr>
	//     <td><code>command_and_search</code></td>
	//     <td>Best for short queries such as voice commands or voice
	// search.</td>
	//   </tr>
	//   <tr>
	//     <td><code>phone_call</code></td>
	//     <td>Best for audio that originated from a phone call (typically
	//     recorded at an 8khz sampling rate).</td>
	//   </tr>
	//   <tr>
	//     <td><code>video</code></td>
	//     <td>Best for audio that originated from from video or includes
	// multiple speakers. Ideally the audio is recorded at a 16khz or
	// greater sampling rate. This is a premium model that costs more than
	// the standard rate.</td>
	//   </tr>
	//   <tr>
	//     <td><code>default</code></td>
	//     <td>Best for audio that is not one of the specific audio models.
	//         For example, long-form audio. Ideally the audio is
	// high-fidelity, recorded at a 16khz or greater sampling rate.</td>
	//   </tr>
	// </table>
	Model string `json:"model,omitempty"`

	// ProfanityFilter: *Optional* If set to `true`, the server will
	// attempt to filter out profanities, replacing all but the initial
	// character in each filtered word with asterisks, e.g. "f***". If set
	// to `false` or omitted, profanities won't be filtered out.
	ProfanityFilter bool `json:"profanityFilter,omitempty"`

	// SampleRateHertz: Sample rate in Hertz of the audio data sent in all
	// `RecognitionAudio` messages. Valid values are: 8000-48000. 16000 is
	// optimal. For best results, set the sampling rate of the audio source
	// to 16000 Hz. If that's not possible, use the native sample rate of
	// the audio source (instead of re-sampling). This field is optional
	// for FLAC and WAV audio files, but is required for all other audio
	// formats. For details, see AudioEncoding.
	SampleRateHertz int64 `json:"sampleRateHertz,omitempty"`

	// SpeechContexts: *Optional* array of SpeechContext. A means to
	// provide context to assist the speech recognition. For more
	// information, see
	// [Phrase Hints](/speech-to-text/docs/basics#phrase-hints).
	SpeechContexts []*SpeechContext `json:"speechContexts,omitempty"`

	// UseEnhanced: *Optional* Set to true to use an enhanced model for
	// speech recognition. If `use_enhanced` is set to true and the `model`
	// field is not set, then an appropriate enhanced model is chosen if:
	// 1. project is eligible for requesting enhanced models
	// 2. an enhanced model exists for the audio
	//
	// If `use_enhanced` is true and an enhanced version of the specified
	// model does not exist, then the speech is recognized using the
	// standard version of the specified model.
	//
	// Enhanced speech models require that you opt-in to data logging using
	// instructions in the
	// [documentation](/speech-to-text/docs/enable-data-logging). If you
	// set `use_enhanced` to true and you have not enabled audio logging,
	// then you will receive an error.
	UseEnhanced bool `json:"useEnhanced,omitempty"`

	// ForceSendFields is a list of field names (e.g.
	// "AlternativeLanguageCodes") to unconditionally include in API
	// requests. By default, fields with empty values are omitted from API
	// requests. However, any non-pointer, non-interface field appearing in
	// ForceSendFields will be sent to the server regardless of whether the
	// field is empty or not. This may be used to include empty fields in
	// Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "AlternativeLanguageCodes")
	// to include in API requests with the JSON null value. By default,
	// fields with empty values are omitted from API requests. However, any
	// field with an empty value appearing in NullFields will be sent to the
	// server as null. It is an error if a field in this list has a
	// non-empty value. This may be used to include null fields in Patch
	// requests.
	NullFields []string `json:"-"`
}
803
804func (s *RecognitionConfig) MarshalJSON() ([]byte, error) {
805	type NoMethod RecognitionConfig
806	raw := NoMethod(*s)
807	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
808}
809
// RecognitionMetadata: Description of audio data to be recognized.
type RecognitionMetadata struct {
	// AudioTopic: Description of the content. Eg. "Recordings of federal
	// supreme court hearings from 2012".
	AudioTopic string `json:"audioTopic,omitempty"`

	// IndustryNaicsCodeOfAudio: The industry vertical to which this speech
	// recognition request most closely applies. This is most indicative of
	// the topics contained in the audio. Use the 6-digit NAICS code to
	// identify the industry vertical - see https://www.naics.com/search/.
	IndustryNaicsCodeOfAudio int64 `json:"industryNaicsCodeOfAudio,omitempty"`

	// InteractionType: The use case most closely describing the audio
	// content to be recognized.
	//
	// Possible values:
	//   "INTERACTION_TYPE_UNSPECIFIED" - Use case is either unknown or is
	// something other than one of the other values below.
	//   "DISCUSSION" - Multiple people in a conversation or discussion. For
	// example in a meeting with two or more people actively participating.
	// Typically all the primary people speaking would be in the same room
	// (if not, see PHONE_CALL)
	//   "PRESENTATION" - One or more persons lecturing or presenting to
	// others, mostly uninterrupted.
	//   "PHONE_CALL" - A phone-call or video-conference in which two or
	// more people, who are not in the same room, are actively
	// participating.
	//   "VOICEMAIL" - A recorded message intended for another person to
	// listen to.
	//   "PROFESSIONALLY_PRODUCED" - Professionally produced audio (eg. TV
	// Show, Podcast).
	//   "VOICE_SEARCH" - Transcribe spoken questions and queries into text.
	//   "VOICE_COMMAND" - Transcribe voice commands, such as for
	// controlling a device.
	//   "DICTATION" - Transcribe speech to text to create a written
	// document, such as a text-message, email or report.
	InteractionType string `json:"interactionType,omitempty"`

	// MicrophoneDistance: The audio type that most closely describes the
	// audio being recognized.
	//
	// Possible values:
	//   "MICROPHONE_DISTANCE_UNSPECIFIED" - Audio type is not known.
	//   "NEARFIELD" - The audio was captured from a closely placed
	// microphone. Eg. phone, dictaphone, or handheld microphone. Generally
	// if the speaker is within 1 meter of the microphone.
	//   "MIDFIELD" - The speaker is within 3 meters of the microphone.
	//   "FARFIELD" - The speaker is more than 3 meters away from the
	// microphone.
	MicrophoneDistance string `json:"microphoneDistance,omitempty"`

	// ObfuscatedId: Obfuscated (privacy-protected) ID of the user, to
	// identify the number of unique users using the service. Encoded on
	// the wire as a JSON string (note the ",string" tag) because the value
	// is a 64-bit integer.
	ObfuscatedId int64 `json:"obfuscatedId,omitempty,string"`

	// OriginalMediaType: The original media the speech was recorded on.
	//
	// Possible values:
	//   "ORIGINAL_MEDIA_TYPE_UNSPECIFIED" - Unknown original media type.
	//   "AUDIO" - The speech data is an audio recording.
	//   "VIDEO" - The speech data originally recorded on a video.
	OriginalMediaType string `json:"originalMediaType,omitempty"`

	// OriginalMimeType: Mime type of the original audio file. For example
	// `audio/m4a`, `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
	// A list of possible audio mime types is maintained at
	// http://www.iana.org/assignments/media-types/media-types.xhtml#audio
	OriginalMimeType string `json:"originalMimeType,omitempty"`

	// RecordingDeviceName: The device used to make the recording. Examples
	// 'Nexus 5X' or 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
	// 'Cardioid Microphone'.
	RecordingDeviceName string `json:"recordingDeviceName,omitempty"`

	// RecordingDeviceType: The type of device the speech was recorded with.
	//
	// Possible values:
	//   "RECORDING_DEVICE_TYPE_UNSPECIFIED" - The recording device is
	// unknown.
	//   "SMARTPHONE" - Speech was recorded on a smartphone.
	//   "PC" - Speech was recorded using a personal computer or tablet.
	//   "PHONE_LINE" - Speech was recorded over a phone line.
	//   "VEHICLE" - Speech was recorded in a vehicle.
	//   "OTHER_OUTDOOR_DEVICE" - Speech was recorded outdoors.
	//   "OTHER_INDOOR_DEVICE" - Speech was recorded indoors.
	RecordingDeviceType string `json:"recordingDeviceType,omitempty"`

	// ForceSendFields is a list of field names (e.g. "AudioTopic") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "AudioTopic") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
927
928func (s *RecognitionMetadata) MarshalJSON() ([]byte, error) {
929	type NoMethod RecognitionMetadata
930	raw := NoMethod(*s)
931	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
932}
933
// RecognizeRequest: The top-level message sent by the client for the
// `Recognize` method.
type RecognizeRequest struct {
	// Audio: *Required* The audio data to be recognized.
	Audio *RecognitionAudio `json:"audio,omitempty"`

	// Config: *Required* Provides information to the recognizer that
	// specifies how to process the request.
	Config *RecognitionConfig `json:"config,omitempty"`

	// Name: *Optional* The name of the model to use for recognition.
	Name string `json:"name,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Audio") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Audio") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
964
965func (s *RecognizeRequest) MarshalJSON() ([]byte, error) {
966	type NoMethod RecognizeRequest
967	raw := NoMethod(*s)
968	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
969}
970
// RecognizeResponse: The only message returned to the client by the
// `Recognize` method. It contains the result as zero or more sequential
// `SpeechRecognitionResult` messages.
type RecognizeResponse struct {
	// Results: Output only. Sequential list of transcription results
	// corresponding to sequential portions of audio.
	Results []*SpeechRecognitionResult `json:"results,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Results") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Results") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1002
1003func (s *RecognizeResponse) MarshalJSON() ([]byte, error) {
1004	type NoMethod RecognizeResponse
1005	raw := NoMethod(*s)
1006	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1007}
1008
// SpeakerDiarizationConfig: Configuration controlling speaker
// diarization (tagging each recognized word with the speaker who was
// detected to have spoken it).
type SpeakerDiarizationConfig struct {
	// EnableSpeakerDiarization: *Optional* If 'true', enables speaker
	// detection for each recognized word in the top alternative of the
	// recognition result using a speaker_tag provided in the WordInfo.
	EnableSpeakerDiarization bool `json:"enableSpeakerDiarization,omitempty"`

	// MaxSpeakerCount: *Optional* Only used if diarization_speaker_count is
	// not set. Maximum number of speakers in the conversation. This range
	// gives you more flexibility by allowing the system to automatically
	// determine the correct number of speakers. If not set, the default
	// value is 6.
	MaxSpeakerCount int64 `json:"maxSpeakerCount,omitempty"`

	// MinSpeakerCount: *Optional* Only used if diarization_speaker_count is
	// not set. Minimum number of speakers in the conversation. This range
	// gives you more flexibility by allowing the system to automatically
	// determine the correct number of speakers. If not set, the default
	// value is 2.
	MinSpeakerCount int64 `json:"minSpeakerCount,omitempty"`

	// ForceSendFields is a list of field names (e.g.
	// "EnableSpeakerDiarization") to unconditionally include in API
	// requests. By default, fields with empty values are omitted from API
	// requests. However, any non-pointer, non-interface field appearing in
	// ForceSendFields will be sent to the server regardless of whether the
	// field is empty or not. This may be used to include empty fields in
	// Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "EnableSpeakerDiarization")
	// to include in API requests with the JSON null value. By default,
	// fields with empty values are omitted from API requests. However, any
	// field with an empty value appearing in NullFields will be sent to the
	// server as null. It is an error if a field in this list has a
	// non-empty value. This may be used to include null fields in Patch
	// requests.
	NullFields []string `json:"-"`
}
1053
1054func (s *SpeakerDiarizationConfig) MarshalJSON() ([]byte, error) {
1055	type NoMethod SpeakerDiarizationConfig
1056	raw := NoMethod(*s)
1057	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1058}
1059
// SpeechContext: Provides "hints" to the speech recognizer to favor
// specific words and phrases in the results.
type SpeechContext struct {
	// Boost: Hint Boost. Positive value will increase the probability that
	// a specific phrase will be recognized over other similar sounding
	// phrases. The higher the boost, the higher the chance of false
	// positive recognition as well. Negative boost values would correspond
	// to anti-biasing. Anti-biasing is not enabled, so negative boost will
	// simply be ignored. Though `boost` can accept a wide range of
	// positive values, most use cases are best served with values between
	// 0 and 20. We recommend using a binary search approach to finding the
	// optimal value for your use case.
	Boost float64 `json:"boost,omitempty"`

	// Phrases: *Optional* A list of strings containing words and phrases
	// "hints" so that the speech recognition is more likely to recognize
	// them. This can be used to improve the accuracy for specific words
	// and phrases, for example, if specific commands are typically spoken
	// by the user. This can also be used to add additional words to the
	// vocabulary of the recognizer. See
	// [usage limits](/speech-to-text/quotas#content).
	//
	// List items can also be set to classes for groups of words that
	// represent common concepts that occur in natural language. For
	// example, rather than providing phrase hints for every month of the
	// year, using the $MONTH class improves the likelihood of correctly
	// transcribing audio that includes months.
	Phrases []string `json:"phrases,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Boost") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Boost") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1120
1121func (s *SpeechContext) MarshalJSON() ([]byte, error) {
1122	type NoMethod SpeechContext
1123	raw := NoMethod(*s)
1124	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1125}
1126
1127func (s *SpeechContext) UnmarshalJSON(data []byte) error {
1128	type NoMethod SpeechContext
1129	var s1 struct {
1130		Boost gensupport.JSONFloat64 `json:"boost"`
1131		*NoMethod
1132	}
1133	s1.NoMethod = (*NoMethod)(s)
1134	if err := json.Unmarshal(data, &s1); err != nil {
1135		return err
1136	}
1137	s.Boost = float64(s1.Boost)
1138	return nil
1139}
1140
// SpeechRecognitionAlternative: Alternative hypotheses (a.k.a. n-best
// list).
type SpeechRecognitionAlternative struct {
	// Confidence: Output only. The confidence estimate between 0.0 and
	// 1.0. A higher number indicates an estimated greater likelihood that
	// the recognized words are correct. This field is set only for the top
	// alternative of a non-streaming result or, of a streaming result
	// where `is_final=true`. This field is not guaranteed to be accurate
	// and users should not rely on it to be always provided. The default
	// of 0.0 is a sentinel value indicating `confidence` was not set.
	Confidence float64 `json:"confidence,omitempty"`

	// Transcript: Output only. Transcript text representing the words that
	// the user spoke.
	Transcript string `json:"transcript,omitempty"`

	// Words: Output only. A list of word-specific information for each
	// recognized word.
	// Note: When `enable_speaker_diarization` is true, you will see all
	// the words from the beginning of the audio.
	Words []*WordInfo `json:"words,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Confidence") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Confidence") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1185
1186func (s *SpeechRecognitionAlternative) MarshalJSON() ([]byte, error) {
1187	type NoMethod SpeechRecognitionAlternative
1188	raw := NoMethod(*s)
1189	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1190}
1191
1192func (s *SpeechRecognitionAlternative) UnmarshalJSON(data []byte) error {
1193	type NoMethod SpeechRecognitionAlternative
1194	var s1 struct {
1195		Confidence gensupport.JSONFloat64 `json:"confidence"`
1196		*NoMethod
1197	}
1198	s1.NoMethod = (*NoMethod)(s)
1199	if err := json.Unmarshal(data, &s1); err != nil {
1200		return err
1201	}
1202	s.Confidence = float64(s1.Confidence)
1203	return nil
1204}
1205
// SpeechRecognitionResult: A speech recognition result corresponding to
// a portion of the audio.
type SpeechRecognitionResult struct {
	// Alternatives: Output only. May contain one or more recognition
	// hypotheses (up to the maximum specified in `max_alternatives`).
	// These alternatives are ordered in terms of accuracy, with the top
	// (first) alternative being the most probable, as ranked by the
	// recognizer.
	Alternatives []*SpeechRecognitionAlternative `json:"alternatives,omitempty"`

	// ChannelTag: For multi-channel audio, this is the channel number
	// corresponding to the recognized result for the audio from that
	// channel. For audio_channel_count = N, its output values can range
	// from '1' to 'N'.
	ChannelTag int64 `json:"channelTag,omitempty"`

	// LanguageCode: Output only. The
	// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag
	// of the language in this result. This language code was detected to
	// have the most likelihood of being spoken in the audio.
	LanguageCode string `json:"languageCode,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Alternatives") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Alternatives") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1249
1250func (s *SpeechRecognitionResult) MarshalJSON() ([]byte, error) {
1251	type NoMethod SpeechRecognitionResult
1252	raw := NoMethod(*s)
1253	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1254}
1255
// Status: The `Status` type defines a logical error model that is
// suitable for different programming environments, including REST APIs
// and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each
// `Status` message contains three pieces of data: error code, error
// message, and error details.
//
// You can find out more about this error model and how to work with it
// in the [API Design Guide](https://cloud.google.com/apis/design/errors).
type Status struct {
	// Code: The status code, which should be an enum value of
	// google.rpc.Code.
	Code int64 `json:"code,omitempty"`

	// Details: A list of messages that carry the error details. There is a
	// common set of message types for APIs to use.
	Details []googleapi.RawMessage `json:"details,omitempty"`

	// Message: A developer-facing error message, which should be in
	// English. Any user-facing error message should be localized and sent
	// in the google.rpc.Status.details field, or localized by the client.
	Message string `json:"message,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Code") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Code") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1301
1302func (s *Status) MarshalJSON() ([]byte, error) {
1303	type NoMethod Status
1304	raw := NoMethod(*s)
1305	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1306}
1307
// WordInfo: Word-specific information for recognized words.
type WordInfo struct {
	// Confidence: Output only. The confidence estimate between 0.0 and
	// 1.0. A higher number indicates an estimated greater likelihood that
	// the recognized words are correct. This field is set only for the top
	// alternative of a non-streaming result or, of a streaming result
	// where `is_final=true`. This field is not guaranteed to be accurate
	// and users should not rely on it to be always provided. The default
	// of 0.0 is a sentinel value indicating `confidence` was not set.
	Confidence float64 `json:"confidence,omitempty"`

	// EndTime: Output only. Time offset relative to the beginning of the
	// audio, and corresponding to the end of the spoken word. This field
	// is only set if `enable_word_time_offsets=true` and only in the top
	// hypothesis. This is an experimental feature and the accuracy of the
	// time offset can vary.
	EndTime string `json:"endTime,omitempty"`

	// SpeakerTag: Output only. A distinct integer value is assigned for
	// every speaker within the audio. This field specifies which one of
	// those speakers was detected to have spoken this word. Value ranges
	// from '1' to diarization_speaker_count. speaker_tag is set if
	// enable_speaker_diarization = 'true' and only in the top alternative.
	SpeakerTag int64 `json:"speakerTag,omitempty"`

	// StartTime: Output only. Time offset relative to the beginning of the
	// audio, and corresponding to the start of the spoken word. This field
	// is only set if `enable_word_time_offsets=true` and only in the top
	// hypothesis. This is an experimental feature and the accuracy of the
	// time offset can vary.
	StartTime string `json:"startTime,omitempty"`

	// Word: Output only. The word corresponding to this set of information.
	Word string `json:"word,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Confidence") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Confidence") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}
1374
1375func (s *WordInfo) MarshalJSON() ([]byte, error) {
1376	type NoMethod WordInfo
1377	raw := NoMethod(*s)
1378	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
1379}
1380
1381func (s *WordInfo) UnmarshalJSON(data []byte) error {
1382	type NoMethod WordInfo
1383	var s1 struct {
1384		Confidence gensupport.JSONFloat64 `json:"confidence"`
1385		*NoMethod
1386	}
1387	s1.NoMethod = (*NoMethod)(s)
1388	if err := json.Unmarshal(data, &s1); err != nil {
1389		return err
1390	}
1391	s.Confidence = float64(s1.Confidence)
1392	return nil
1393}
1394
1395// method id "speech.operations.get":
1396
// OperationsGetCall holds the state for a "speech.operations.get"
// request: the owning service, the operation name (path parameter),
// URL query parameters, an optional ETag for conditional fetches, and
// the caller-supplied context and headers.
type OperationsGetCall struct {
	s            *Service
	name         string // operation resource name, expanded into {+name}
	urlParams_   gensupport.URLParams
	ifNoneMatch_ string // sent as the If-None-Match header when non-empty
	ctx_         context.Context
	header_      http.Header
}
1405
1406// Get: Gets the latest state of a long-running operation.  Clients can
1407// use this
1408// method to poll the operation result at intervals as recommended by
1409// the API
1410// service.
1411func (r *OperationsService) Get(name string) *OperationsGetCall {
1412	c := &OperationsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1413	c.name = name
1414	return c
1415}
1416
1417// Fields allows partial responses to be retrieved. See
1418// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
1419// for more information.
1420func (c *OperationsGetCall) Fields(s ...googleapi.Field) *OperationsGetCall {
1421	c.urlParams_.Set("fields", googleapi.CombineFields(s))
1422	return c
1423}
1424
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *OperationsGetCall) IfNoneMatch(entityTag string) *OperationsGetCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1434
1435// Context sets the context to be used in this call's Do method. Any
1436// pending HTTP request will be aborted if the provided context is
1437// canceled.
1438func (c *OperationsGetCall) Context(ctx context.Context) *OperationsGetCall {
1439	c.ctx_ = ctx
1440	return c
1441}
1442
1443// Header returns an http.Header that can be modified by the caller to
1444// add HTTP headers to the request.
1445func (c *OperationsGetCall) Header() http.Header {
1446	if c.header_ == nil {
1447		c.header_ = make(http.Header)
1448	}
1449	return c.header_
1450}
1451
// doRequest builds and sends the HTTP GET request for the
// speech.operations.get call and returns the raw *http.Response. alt
// selects the response encoding ("json" when called from Do).
func (c *OperationsGetCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy any caller-supplied headers (set via Header()) into the
	// request's own header map.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil // GET request: no body
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/operations/{+name}")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	// Substitute the {+name} template in the path with the operation
	// resource name.
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
1476
// Do executes the "speech.operations.get" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *OperationsGetCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified (possible when IfNoneMatch was set) carries no
	// decodable body; surface it as a *googleapi.Error so callers can
	// detect it with googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate the response wrapper with HTTP metadata, then decode
	// the JSON payload into it.
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Gets the latest state of a long-running operation.  Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.",
	//   "flatPath": "v1p1beta1/operations/{operationsId}",
	//   "httpMethod": "GET",
	//   "id": "speech.operations.get",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "name": {
	//       "description": "The name of the operation resource.",
	//       "location": "path",
	//       "pattern": "^.+$",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/operations/{+name}",
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
1541
1542// method id "speech.operations.list":
1543
// OperationsListCall holds the state for a "speech.operations.list"
// request: the owning service, URL query parameters (filter, name,
// pageSize, pageToken), an optional ETag for conditional fetches, and
// the caller-supplied context and headers.
type OperationsListCall struct {
	s            *Service
	urlParams_   gensupport.URLParams
	ifNoneMatch_ string // sent as the If-None-Match header when non-empty
	ctx_         context.Context
	header_      http.Header
}
1551
1552// List: Lists operations that match the specified filter in the
1553// request. If the
1554// server doesn't support this method, it returns
1555// `UNIMPLEMENTED`.
1556//
1557// NOTE: the `name` binding allows API services to override the
1558// binding
1559// to use different resource name schemes, such as `users/*/operations`.
1560// To
1561// override the binding, API services can add a binding such
1562// as
1563// "/v1/{name=users/*}/operations" to their service configuration.
1564// For backwards compatibility, the default name includes the
1565// operations
1566// collection id, however overriding users must ensure the name
1567// binding
1568// is the parent resource, without the operations collection id.
1569func (r *OperationsService) List() *OperationsListCall {
1570	c := &OperationsListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1571	return c
1572}
1573
// Filter sets the optional parameter "filter": The standard list
// filter. It returns c to allow call chaining.
func (c *OperationsListCall) Filter(filter string) *OperationsListCall {
	c.urlParams_.Set("filter", filter)
	return c
}
1580
// Name sets the optional parameter "name": The name of the operation's
// parent resource. It returns c to allow call chaining.
func (c *OperationsListCall) Name(name string) *OperationsListCall {
	c.urlParams_.Set("name", name)
	return c
}
1587
1588// PageSize sets the optional parameter "pageSize": The standard list
1589// page size.
1590func (c *OperationsListCall) PageSize(pageSize int64) *OperationsListCall {
1591	c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
1592	return c
1593}
1594
// PageToken sets the optional parameter "pageToken": The standard list
// page token. It returns c to allow call chaining.
func (c *OperationsListCall) PageToken(pageToken string) *OperationsListCall {
	c.urlParams_.Set("pageToken", pageToken)
	return c
}
1601
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. It returns c to allow call chaining.
func (c *OperationsListCall) Fields(s ...googleapi.Field) *OperationsListCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
1609
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *OperationsListCall) IfNoneMatch(entityTag string) *OperationsListCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1619
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. It returns c to allow call chaining.
func (c *OperationsListCall) Context(ctx context.Context) *OperationsListCall {
	c.ctx_ = ctx
	return c
}
1627
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request. The header map is lazily created on
// first use.
func (c *OperationsListCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}
1636
// doRequest builds and sends the HTTP GET request for this call and
// returns the raw response. alt selects the response wire format.
func (c *OperationsListCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers into the outgoing request headers.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/operations")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
1658
// Do executes the "speech.operations.list" call.
// Exactly one of *ListOperationsResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListOperationsResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *OperationsListCall) Do(opts ...googleapi.CallOption) (*ListOperationsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified carries no body to decode; report it as a
	// *googleapi.Error so callers can use googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate server response metadata before decoding the body.
	ret := &ListOperationsResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Lists operations that match the specified filter in the request. If the\nserver doesn't support this method, it returns `UNIMPLEMENTED`.\n\nNOTE: the `name` binding allows API services to override the binding\nto use different resource name schemes, such as `users/*/operations`. To\noverride the binding, API services can add a binding such as\n`\"/v1/{name=users/*}/operations\"` to their service configuration.\nFor backwards compatibility, the default name includes the operations\ncollection id, however overriding users must ensure the name binding\nis the parent resource, without the operations collection id.",
	//   "flatPath": "v1p1beta1/operations",
	//   "httpMethod": "GET",
	//   "id": "speech.operations.list",
	//   "parameterOrder": [],
	//   "parameters": {
	//     "filter": {
	//       "description": "The standard list filter.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "name": {
	//       "description": "The name of the operation's parent resource.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "pageSize": {
	//       "description": "The standard list page size.",
	//       "format": "int32",
	//       "location": "query",
	//       "type": "integer"
	//     },
	//     "pageToken": {
	//       "description": "The standard list page token.",
	//       "location": "query",
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/operations",
	//   "response": {
	//     "$ref": "ListOperationsResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
1735
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *OperationsListCall) Pages(ctx context.Context, f func(*ListOperationsResponse) error) error {
	c.ctx_ = ctx
	defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
	for {
		x, err := c.Do()
		if err != nil {
			return err
		}
		if err := f(x); err != nil {
			return err
		}
		// An empty NextPageToken signals the final page.
		if x.NextPageToken == "" {
			return nil
		}
		c.PageToken(x.NextPageToken)
	}
}
1756
1757// method id "speech.projects.locations.operations.get":
1758
1759type ProjectsLocationsOperationsGetCall struct {
1760	s            *Service
1761	name         string
1762	urlParams_   gensupport.URLParams
1763	ifNoneMatch_ string
1764	ctx_         context.Context
1765	header_      http.Header
1766}
1767
1768// Get: Gets the latest state of a long-running operation.  Clients can
1769// use this
1770// method to poll the operation result at intervals as recommended by
1771// the API
1772// service.
1773func (r *ProjectsLocationsOperationsService) Get(name string) *ProjectsLocationsOperationsGetCall {
1774	c := &ProjectsLocationsOperationsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1775	c.name = name
1776	return c
1777}
1778
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. It returns c to allow call chaining.
func (c *ProjectsLocationsOperationsGetCall) Fields(s ...googleapi.Field) *ProjectsLocationsOperationsGetCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
1786
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsLocationsOperationsGetCall) IfNoneMatch(entityTag string) *ProjectsLocationsOperationsGetCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1796
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. It returns c to allow call chaining.
func (c *ProjectsLocationsOperationsGetCall) Context(ctx context.Context) *ProjectsLocationsOperationsGetCall {
	c.ctx_ = ctx
	return c
}
1804
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request. The header map is lazily created on
// first use.
func (c *ProjectsLocationsOperationsGetCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}
1813
// doRequest builds and sends the HTTP GET request for this call and
// returns the raw response. alt selects the response wire format.
func (c *ProjectsLocationsOperationsGetCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers into the outgoing request headers.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/{+name}")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	// Substitute the operation resource name into the {+name} template.
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
1838
// Do executes the "speech.projects.locations.operations.get" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsLocationsOperationsGetCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified carries no body to decode; report it as a
	// *googleapi.Error so callers can use googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate server response metadata before decoding the body.
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Gets the latest state of a long-running operation.  Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.",
	//   "flatPath": "v1p1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}",
	//   "httpMethod": "GET",
	//   "id": "speech.projects.locations.operations.get",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "name": {
	//       "description": "The name of the operation resource.",
	//       "location": "path",
	//       "pattern": "^projects/[^/]+/locations/[^/]+/operations/[^/]+$",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/{+name}",
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
1903
1904// method id "speech.projects.locations.operations.list":
1905
1906type ProjectsLocationsOperationsListCall struct {
1907	s            *Service
1908	name         string
1909	urlParams_   gensupport.URLParams
1910	ifNoneMatch_ string
1911	ctx_         context.Context
1912	header_      http.Header
1913}
1914
1915// List: Lists operations that match the specified filter in the
1916// request. If the
1917// server doesn't support this method, it returns
1918// `UNIMPLEMENTED`.
1919//
1920// NOTE: the `name` binding allows API services to override the
1921// binding
1922// to use different resource name schemes, such as `users/*/operations`.
1923// To
1924// override the binding, API services can add a binding such
1925// as
1926// "/v1/{name=users/*}/operations" to their service configuration.
1927// For backwards compatibility, the default name includes the
1928// operations
1929// collection id, however overriding users must ensure the name
1930// binding
1931// is the parent resource, without the operations collection id.
1932func (r *ProjectsLocationsOperationsService) List(name string) *ProjectsLocationsOperationsListCall {
1933	c := &ProjectsLocationsOperationsListCall{s: r.s, urlParams_: make(gensupport.URLParams)}
1934	c.name = name
1935	return c
1936}
1937
// Filter sets the optional parameter "filter": The standard list
// filter. It returns c to allow call chaining.
func (c *ProjectsLocationsOperationsListCall) Filter(filter string) *ProjectsLocationsOperationsListCall {
	c.urlParams_.Set("filter", filter)
	return c
}
1944
1945// PageSize sets the optional parameter "pageSize": The standard list
1946// page size.
1947func (c *ProjectsLocationsOperationsListCall) PageSize(pageSize int64) *ProjectsLocationsOperationsListCall {
1948	c.urlParams_.Set("pageSize", fmt.Sprint(pageSize))
1949	return c
1950}
1951
// PageToken sets the optional parameter "pageToken": The standard list
// page token. It returns c to allow call chaining.
func (c *ProjectsLocationsOperationsListCall) PageToken(pageToken string) *ProjectsLocationsOperationsListCall {
	c.urlParams_.Set("pageToken", pageToken)
	return c
}
1958
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. It returns c to allow call chaining.
func (c *ProjectsLocationsOperationsListCall) Fields(s ...googleapi.Field) *ProjectsLocationsOperationsListCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
1966
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsLocationsOperationsListCall) IfNoneMatch(entityTag string) *ProjectsLocationsOperationsListCall {
	c.ifNoneMatch_ = entityTag
	return c
}
1976
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. It returns c to allow call chaining.
func (c *ProjectsLocationsOperationsListCall) Context(ctx context.Context) *ProjectsLocationsOperationsListCall {
	c.ctx_ = ctx
	return c
}
1984
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request. The header map is lazily created on
// first use.
func (c *ProjectsLocationsOperationsListCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}
1993
// doRequest builds and sends the HTTP GET request for this call and
// returns the raw response. alt selects the response wire format.
func (c *ProjectsLocationsOperationsListCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers into the outgoing request headers.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/{+name}/operations")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	// Substitute the parent resource name into the {+name} template.
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
2018
// Do executes the "speech.projects.locations.operations.list" call.
// Exactly one of *ListOperationsResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListOperationsResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *ProjectsLocationsOperationsListCall) Do(opts ...googleapi.CallOption) (*ListOperationsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified carries no body to decode; report it as a
	// *googleapi.Error so callers can use googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate server response metadata before decoding the body.
	ret := &ListOperationsResponse{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Lists operations that match the specified filter in the request. If the\nserver doesn't support this method, it returns `UNIMPLEMENTED`.\n\nNOTE: the `name` binding allows API services to override the binding\nto use different resource name schemes, such as `users/*/operations`. To\noverride the binding, API services can add a binding such as\n`\"/v1/{name=users/*}/operations\"` to their service configuration.\nFor backwards compatibility, the default name includes the operations\ncollection id, however overriding users must ensure the name binding\nis the parent resource, without the operations collection id.",
	//   "flatPath": "v1p1beta1/projects/{projectsId}/locations/{locationsId}/operations",
	//   "httpMethod": "GET",
	//   "id": "speech.projects.locations.operations.list",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "filter": {
	//       "description": "The standard list filter.",
	//       "location": "query",
	//       "type": "string"
	//     },
	//     "name": {
	//       "description": "The name of the operation's parent resource.",
	//       "location": "path",
	//       "pattern": "^projects/[^/]+/locations/[^/]+$",
	//       "required": true,
	//       "type": "string"
	//     },
	//     "pageSize": {
	//       "description": "The standard list page size.",
	//       "format": "int32",
	//       "location": "query",
	//       "type": "integer"
	//     },
	//     "pageToken": {
	//       "description": "The standard list page token.",
	//       "location": "query",
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/{+name}/operations",
	//   "response": {
	//     "$ref": "ListOperationsResponse"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
2099
// Pages invokes f for each page of results.
// A non-nil error returned from f will halt the iteration.
// The provided context supersedes any context provided to the Context method.
func (c *ProjectsLocationsOperationsListCall) Pages(ctx context.Context, f func(*ListOperationsResponse) error) error {
	c.ctx_ = ctx
	defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point
	for {
		x, err := c.Do()
		if err != nil {
			return err
		}
		if err := f(x); err != nil {
			return err
		}
		// An empty NextPageToken signals the final page.
		if x.NextPageToken == "" {
			return nil
		}
		c.PageToken(x.NextPageToken)
	}
}
2120
2121// method id "speech.projects.operations.manualRecognitionTasks.get":
2122
2123type ProjectsOperationsManualRecognitionTasksGetCall struct {
2124	s            *Service
2125	name         string
2126	urlParams_   gensupport.URLParams
2127	ifNoneMatch_ string
2128	ctx_         context.Context
2129	header_      http.Header
2130}
2131
2132// Get: Gets the latest state of a long-running operation.  Clients can
2133// use this
2134// method to poll the operation result at intervals as recommended by
2135// the API
2136// service.
2137func (r *ProjectsOperationsManualRecognitionTasksService) Get(name string) *ProjectsOperationsManualRecognitionTasksGetCall {
2138	c := &ProjectsOperationsManualRecognitionTasksGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
2139	c.name = name
2140	return c
2141}
2142
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. It returns c to allow call chaining.
func (c *ProjectsOperationsManualRecognitionTasksGetCall) Fields(s ...googleapi.Field) *ProjectsOperationsManualRecognitionTasksGetCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
2150
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *ProjectsOperationsManualRecognitionTasksGetCall) IfNoneMatch(entityTag string) *ProjectsOperationsManualRecognitionTasksGetCall {
	c.ifNoneMatch_ = entityTag
	return c
}
2160
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. It returns c to allow call chaining.
func (c *ProjectsOperationsManualRecognitionTasksGetCall) Context(ctx context.Context) *ProjectsOperationsManualRecognitionTasksGetCall {
	c.ctx_ = ctx
	return c
}
2168
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request. The header map is lazily created on
// first use.
func (c *ProjectsOperationsManualRecognitionTasksGetCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}
2177
// doRequest builds and sends the HTTP GET request for this call and
// returns the raw response. alt selects the response wire format.
func (c *ProjectsOperationsManualRecognitionTasksGetCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers into the outgoing request headers.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/{+name}")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("GET", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	// Substitute the operation resource name into the {+name} template.
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
2202
// Do executes the "speech.projects.operations.manualRecognitionTasks.get" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *ProjectsOperationsManualRecognitionTasksGetCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified carries no body to decode; report it as a
	// *googleapi.Error so callers can use googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate server response metadata before decoding the body.
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Gets the latest state of a long-running operation.  Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.",
	//   "flatPath": "v1p1beta1/projects/{projectsId}/operations/manualRecognitionTasks/{manualRecognitionTasksId}",
	//   "httpMethod": "GET",
	//   "id": "speech.projects.operations.manualRecognitionTasks.get",
	//   "parameterOrder": [
	//     "name"
	//   ],
	//   "parameters": {
	//     "name": {
	//       "description": "The name of the operation resource.",
	//       "location": "path",
	//       "pattern": "^projects/[^/]+/operations/manualRecognitionTasks/[^/]+$",
	//       "required": true,
	//       "type": "string"
	//     }
	//   },
	//   "path": "v1p1beta1/{+name}",
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
2267
2268// method id "speech.speech.longrunningrecognize":
2269
2270type SpeechLongrunningrecognizeCall struct {
2271	s                           *Service
2272	longrunningrecognizerequest *LongRunningRecognizeRequest
2273	urlParams_                  gensupport.URLParams
2274	ctx_                        context.Context
2275	header_                     http.Header
2276}
2277
2278// Longrunningrecognize: Performs asynchronous speech recognition:
2279// receive results via the
2280// google.longrunning.Operations interface. Returns either
2281// an
2282// `Operation.error` or an `Operation.response` which contains
2283// a `LongRunningRecognizeResponse` message.
2284// For more information on asynchronous speech recognition, see
2285// the
2286// [how-to](https://cloud.google.com/speech-to-text/docs/async-recogn
2287// ize).
2288func (r *SpeechService) Longrunningrecognize(longrunningrecognizerequest *LongRunningRecognizeRequest) *SpeechLongrunningrecognizeCall {
2289	c := &SpeechLongrunningrecognizeCall{s: r.s, urlParams_: make(gensupport.URLParams)}
2290	c.longrunningrecognizerequest = longrunningrecognizerequest
2291	return c
2292}
2293
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information. It returns c to allow call chaining.
func (c *SpeechLongrunningrecognizeCall) Fields(s ...googleapi.Field) *SpeechLongrunningrecognizeCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}
2301
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled. It returns c to allow call chaining.
func (c *SpeechLongrunningrecognizeCall) Context(ctx context.Context) *SpeechLongrunningrecognizeCall {
	c.ctx_ = ctx
	return c
}
2309
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request. The header map is lazily created on
// first use.
func (c *SpeechLongrunningrecognizeCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}
2318
// doRequest JSON-encodes the request message, builds the HTTP POST
// request, and sends it, returning the raw response. alt selects the
// response wire format.
func (c *SpeechLongrunningrecognizeCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers into the outgoing request headers.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	body, err := googleapi.WithoutDataWrapper.JSONReader(c.longrunningrecognizerequest)
	if err != nil {
		return nil, err
	}
	reqHeaders.Set("Content-Type", "application/json")
	c.urlParams_.Set("alt", alt)
	c.urlParams_.Set("prettyPrint", "false")
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/speech:longrunningrecognize")
	urls += "?" + c.urlParams_.Encode()
	req, err := http.NewRequest("POST", urls, body)
	if err != nil {
		return nil, err
	}
	req.Header = reqHeaders
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
2342
// Do executes the "speech.speech.longrunningrecognize" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *SpeechLongrunningrecognizeCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
	res, err := c.doRequest("json")
	// A 304 Not Modified carries no body to decode; report it as a
	// *googleapi.Error so callers can use googleapi.IsNotModified.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &googleapi.Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, err
	}
	defer googleapi.CloseBody(res)
	if err := googleapi.CheckResponse(res); err != nil {
		return nil, err
	}
	// Pre-populate server response metadata before decoding the body.
	ret := &Operation{
		ServerResponse: googleapi.ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := &ret
	if err := gensupport.DecodeResponse(target, res); err != nil {
		return nil, err
	}
	return ret, nil
	// {
	//   "description": "Performs asynchronous speech recognition: receive results via the\ngoogle.longrunning.Operations interface. Returns either an\n`Operation.error` or an `Operation.response` which contains\na `LongRunningRecognizeResponse` message.\nFor more information on asynchronous speech recognition, see the\n[how-to](https://cloud.google.com/speech-to-text/docs/async-recognize).",
	//   "flatPath": "v1p1beta1/speech:longrunningrecognize",
	//   "httpMethod": "POST",
	//   "id": "speech.speech.longrunningrecognize",
	//   "parameterOrder": [],
	//   "parameters": {},
	//   "path": "v1p1beta1/speech:longrunningrecognize",
	//   "request": {
	//     "$ref": "LongRunningRecognizeRequest"
	//   },
	//   "response": {
	//     "$ref": "Operation"
	//   },
	//   "scopes": [
	//     "https://www.googleapis.com/auth/cloud-platform"
	//   ]
	// }

}
2400
2401// method id "speech.speech.recognize":
2402
// SpeechRecognizeCall holds the state for a pending
// "speech.speech.recognize" request. It is created by
// SpeechService.Recognize and executed by its Do method; the
// Fields, Context, and Header methods mutate it before execution.
type SpeechRecognizeCall struct {
	s                *Service
	recognizerequest *RecognizeRequest    // request body serialized to JSON by doRequest
	urlParams_       gensupport.URLParams // URL query parameters (alt, fields, prettyPrint, ...)
	ctx_             context.Context      // optional context; passed to gensupport.SendRequest
	header_          http.Header          // extra caller-supplied HTTP headers, lazily created by Header
}
2410
2411// Recognize: Performs synchronous speech recognition: receive results
2412// after all audio
2413// has been sent and processed.
2414func (r *SpeechService) Recognize(recognizerequest *RecognizeRequest) *SpeechRecognizeCall {
2415	c := &SpeechRecognizeCall{s: r.s, urlParams_: make(gensupport.URLParams)}
2416	c.recognizerequest = recognizerequest
2417	return c
2418}
2419
2420// Fields allows partial responses to be retrieved. See
2421// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
2422// for more information.
2423func (c *SpeechRecognizeCall) Fields(s ...googleapi.Field) *SpeechRecognizeCall {
2424	c.urlParams_.Set("fields", googleapi.CombineFields(s))
2425	return c
2426}
2427
2428// Context sets the context to be used in this call's Do method. Any
2429// pending HTTP request will be aborted if the provided context is
2430// canceled.
2431func (c *SpeechRecognizeCall) Context(ctx context.Context) *SpeechRecognizeCall {
2432	c.ctx_ = ctx
2433	return c
2434}
2435
2436// Header returns an http.Header that can be modified by the caller to
2437// add HTTP headers to the request.
2438func (c *SpeechRecognizeCall) Header() http.Header {
2439	if c.header_ == nil {
2440		c.header_ = make(http.Header)
2441	}
2442	return c.header_
2443}
2444
2445func (c *SpeechRecognizeCall) doRequest(alt string) (*http.Response, error) {
2446	reqHeaders := make(http.Header)
2447	for k, v := range c.header_ {
2448		reqHeaders[k] = v
2449	}
2450	reqHeaders.Set("User-Agent", c.s.userAgent())
2451	var body io.Reader = nil
2452	body, err := googleapi.WithoutDataWrapper.JSONReader(c.recognizerequest)
2453	if err != nil {
2454		return nil, err
2455	}
2456	reqHeaders.Set("Content-Type", "application/json")
2457	c.urlParams_.Set("alt", alt)
2458	c.urlParams_.Set("prettyPrint", "false")
2459	urls := googleapi.ResolveRelative(c.s.BasePath, "v1p1beta1/speech:recognize")
2460	urls += "?" + c.urlParams_.Encode()
2461	req, err := http.NewRequest("POST", urls, body)
2462	if err != nil {
2463		return nil, err
2464	}
2465	req.Header = reqHeaders
2466	return gensupport.SendRequest(c.ctx_, c.s.client, req)
2467}
2468
2469// Do executes the "speech.speech.recognize" call.
2470// Exactly one of *RecognizeResponse or error will be non-nil. Any
2471// non-2xx status code is an error. Response headers are in either
2472// *RecognizeResponse.ServerResponse.Header or (if a response was
2473// returned at all) in error.(*googleapi.Error).Header. Use
2474// googleapi.IsNotModified to check whether the returned error was
2475// because http.StatusNotModified was returned.
2476func (c *SpeechRecognizeCall) Do(opts ...googleapi.CallOption) (*RecognizeResponse, error) {
2477	gensupport.SetOptions(c.urlParams_, opts...)
2478	res, err := c.doRequest("json")
2479	if res != nil && res.StatusCode == http.StatusNotModified {
2480		if res.Body != nil {
2481			res.Body.Close()
2482		}
2483		return nil, &googleapi.Error{
2484			Code:   res.StatusCode,
2485			Header: res.Header,
2486		}
2487	}
2488	if err != nil {
2489		return nil, err
2490	}
2491	defer googleapi.CloseBody(res)
2492	if err := googleapi.CheckResponse(res); err != nil {
2493		return nil, err
2494	}
2495	ret := &RecognizeResponse{
2496		ServerResponse: googleapi.ServerResponse{
2497			Header:         res.Header,
2498			HTTPStatusCode: res.StatusCode,
2499		},
2500	}
2501	target := &ret
2502	if err := gensupport.DecodeResponse(target, res); err != nil {
2503		return nil, err
2504	}
2505	return ret, nil
2506	// {
2507	//   "description": "Performs synchronous speech recognition: receive results after all audio\nhas been sent and processed.",
2508	//   "flatPath": "v1p1beta1/speech:recognize",
2509	//   "httpMethod": "POST",
2510	//   "id": "speech.speech.recognize",
2511	//   "parameterOrder": [],
2512	//   "parameters": {},
2513	//   "path": "v1p1beta1/speech:recognize",
2514	//   "request": {
2515	//     "$ref": "RecognizeRequest"
2516	//   },
2517	//   "response": {
2518	//     "$ref": "RecognizeResponse"
2519	//   },
2520	//   "scopes": [
2521	//     "https://www.googleapis.com/auth/cloud-platform"
2522	//   ]
2523	// }
2524
2525}
2526