1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3#
4# Copyright (C) 2017 Google
5# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
6# ----------------------------------------------------------------------------
7#
8#     ***     AUTO GENERATED CODE    ***    AUTO GENERATED CODE     ***
9#
10# ----------------------------------------------------------------------------
11#
12#     This file is automatically generated by Magic Modules and manual
13#     changes will be clobbered when the file is regenerated.
14#
15#     Please read more about how to change this file at
16#     https://www.github.com/GoogleCloudPlatform/magic-modules
17#
18# ----------------------------------------------------------------------------
19
20from __future__ import absolute_import, division, print_function
21
# Force new-style classes under Python 2 (no-op on Python 3).
__metaclass__ = type
23
24################################################################################
25# Documentation
26################################################################################
27
# Module maturity/support metadata consumed by ansible-doc and the plugin loader.
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
29
30DOCUMENTATION = '''
31---
32module: gcp_bigquery_dataset_info
33description:
34- Gather info for GCP Dataset
35- This module was called C(gcp_bigquery_dataset_facts) before Ansible 2.9. The usage
36  has not changed.
37short_description: Gather info for GCP Dataset
38version_added: 2.8
39author: Google Inc. (@googlecloudplatform)
40requirements:
41- python >= 2.6
42- requests >= 2.18.4
43- google-auth >= 1.3.0
44options: {}
45extends_documentation_fragment: gcp
46'''
47
48EXAMPLES = '''
49- name: get info on a dataset
50  gcp_bigquery_dataset_info:
51    project: test_project
52    auth_kind: serviceaccount
53    service_account_file: "/tmp/auth.pem"
54'''
55
56RETURN = '''
57resources:
58  description: List of resources
59  returned: always
60  type: complex
61  contains:
62    name:
63      description:
64      - Dataset name.
65      returned: success
66      type: str
67    access:
68      description:
69      - An array of objects that define dataset access for one or more entities.
70      returned: success
71      type: complex
72      contains:
73        domain:
74          description:
          - A domain to grant access to. Any users signed in with the domain specified
            will be granted the specified access.
77          returned: success
78          type: str
79        groupByEmail:
80          description:
81          - An email address of a Google Group to grant access to.
82          returned: success
83          type: str
84        role:
85          description:
86          - Describes the rights granted to the user specified by the other member
87            of the access object. Primitive, Predefined and custom roles are supported.
88            Predefined roles that have equivalent primitive roles are swapped by the
89            API to their Primitive counterparts, and will show a diff post-create.
90            See [official docs](U(https://cloud.google.com/bigquery/docs/access-control)).
91          returned: success
92          type: str
93        specialGroup:
94          description:
95          - A special group to grant access to.
96          - 'Possible values include: * `projectOwners`: Owners of the enclosing project.'
97          - "* `projectReaders`: Readers of the enclosing project."
98          - "* `projectWriters`: Writers of the enclosing project."
          - "* `allAuthenticatedUsers`: All authenticated BigQuery users."
100          returned: success
101          type: str
102        userByEmail:
103          description:
104          - 'An email address of a user to grant access to. For example: fred@example.com
105            .'
106          returned: success
107          type: str
108        view:
109          description:
110          - A view from a different dataset to grant access to. Queries executed against
111            that view will have read access to tables in this dataset. The role field
112            is not required when this field is set. If that view is updated by any
113            user, access to the view needs to be granted again via an update operation.
114          returned: success
115          type: complex
116          contains:
117            datasetId:
118              description:
119              - The ID of the dataset containing this table.
120              returned: success
121              type: str
122            projectId:
123              description:
124              - The ID of the project containing this table.
125              returned: success
126              type: str
127            tableId:
128              description:
129              - The ID of the table. The ID must contain only letters (a-z, A-Z),
130                numbers (0-9), or underscores. The maximum length is 1,024 characters.
131              returned: success
132              type: str
133    creationTime:
134      description:
135      - The time when this dataset was created, in milliseconds since the epoch.
136      returned: success
137      type: int
138    datasetReference:
139      description:
140      - A reference that identifies the dataset.
141      returned: success
142      type: complex
143      contains:
144        datasetId:
145          description:
146          - A unique ID for this dataset, without the project name. The ID must contain
147            only letters (a-z, A-Z), numbers (0-9), or underscores. The maximum length
148            is 1,024 characters.
149          returned: success
150          type: str
151        projectId:
152          description:
153          - The ID of the project containing this dataset.
154          returned: success
155          type: str
156    defaultTableExpirationMs:
157      description:
158      - The default lifetime of all tables in the dataset, in milliseconds.
159      - The minimum value is 3600000 milliseconds (one hour).
160      - Once this property is set, all newly-created tables in the dataset will have
161        an `expirationTime` property set to the creation time plus the value in this
162        property, and changing the value will only affect new tables, not existing
163        ones. When the `expirationTime` for a given table is reached, that table will
164        be deleted automatically.
165      - If a table's `expirationTime` is modified or removed before the table expires,
166        or if you provide an explicit `expirationTime` when creating a table, that
167        value takes precedence over the default expiration time indicated by this
168        property.
169      returned: success
170      type: int
171    defaultPartitionExpirationMs:
172      description:
173      - The default partition expiration for all partitioned tables in the dataset,
174        in milliseconds.
175      - Once this property is set, all newly-created partitioned tables in the dataset
176        will have an `expirationMs` property in the `timePartitioning` settings set
177        to this value, and changing the value will only affect new tables, not existing
178        ones. The storage in a partition will have an expiration time of its partition
179        time plus this value.
180      - 'Setting this property overrides the use of `defaultTableExpirationMs` for
181        partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs`
182        will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs`
183        when creating or updating a partitioned table, that value takes precedence
184        over the default partition expiration time indicated by this property.'
185      returned: success
186      type: int
187    description:
188      description:
189      - A user-friendly description of the dataset.
190      returned: success
191      type: str
192    etag:
193      description:
194      - A hash of the resource.
195      returned: success
196      type: str
197    friendlyName:
198      description:
199      - A descriptive name for the dataset.
200      returned: success
201      type: str
202    id:
203      description:
      - The fully-qualified unique name of the dataset in the format projectId:datasetId.
        The dataset name without the project name is given in the datasetId field.
207      returned: success
208      type: str
209    labels:
210      description:
211      - The labels associated with this dataset. You can use these to organize and
212        group your datasets .
213      returned: success
214      type: dict
215    lastModifiedTime:
216      description:
217      - The date when this dataset or any of its tables was last modified, in milliseconds
218        since the epoch.
219      returned: success
220      type: int
221    location:
222      description:
223      - The geographic location where the dataset should reside.
224      - See [official docs](U(https://cloud.google.com/bigquery/docs/dataset-locations)).
225      - There are two types of locations, regional or multi-regional. A regional location
226        is a specific geographic place, such as Tokyo, and a multi-regional location
227        is a large geographic area, such as the United States, that contains at least
228        two geographic places.
229      - 'Possible regional values include: `asia-east1`, `asia-northeast1`, `asia-southeast1`,
230        `australia-southeast1`, `europe-north1`, `europe-west2` and `us-east4`.'
231      - 'Possible multi-regional values: `EU` and `US`.'
232      - The default value is multi-regional location `US`.
233      - Changing this forces a new resource to be created.
234      returned: success
235      type: str
236'''
237
238################################################################################
239# Imports
240################################################################################
241from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
242import json
243
244################################################################################
245# Main
246################################################################################
247
248
def main():
    """Entry point: collect BigQuery dataset info for the project and exit."""
    module = GcpModule(argument_spec=dict())

    # Warn callers still using the pre-Ansible-2.9 module name.
    if module._name == 'gcp_bigquery_dataset_facts':
        module.deprecate("The 'gcp_bigquery_dataset_facts' module has been renamed to 'gcp_bigquery_dataset_info'", version='2.13')

    # Default to the BigQuery OAuth scope when the user supplied none.
    module.params['scopes'] = module.params['scopes'] or ['https://www.googleapis.com/auth/bigquery']

    module.exit_json(resources=fetch_list(module, collection(module)))
260
261
def collection(module):
    """Return the datasets collection URL for the module's configured project."""
    base = "https://www.googleapis.com/bigquery/v2/projects/{project}/datasets"
    return base.format(project=module.params['project'])
264
265
def fetch_list(module, link):
    """Fetch every dataset at `link`, letting the session follow pagination."""
    session = GcpSession(module, 'bigquery')
    # 'datasets' is the array key in the BigQuery list response payload.
    return session.list(link, return_if_object, array_name='datasets')
269
270
def return_if_object(module, response):
    """Decode an HTTP response into a dict, or return None for 404/204.

    Fails the module on a non-success status, on a body that is not
    valid JSON, or on an API error payload embedded in the result.
    """
    # A missing resource (404) or an empty body (204) means "nothing to report".
    if response.status_code in (404, 204):
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        # json.decoder.JSONDecodeError exists on Python 3; ValueError covers Python 2.
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    # Surface API-level errors reported inside an otherwise-successful response.
    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result
290
291
# Run the module when Ansible executes this file directly.
if __name__ == "__main__":
    main()
294