#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
#     ***     AUTO GENERATED CODE    ***    AUTO GENERATED CODE     ***
#
# ----------------------------------------------------------------------------
#
#     This file is automatically generated by Magic Modules and manual
#     changes will be clobbered when the file is regenerated.
#
#     Please read more about how to change this file at
#     https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------

from __future__ import absolute_import, division, print_function

__metaclass__ = type

################################################################################
# Documentation
################################################################################

ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}

DOCUMENTATION = '''
---
module: gcp_bigquery_dataset_info
description:
- Gather info for GCP Dataset
short_description: Gather info for GCP Dataset
version_added: '2.8'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
  project:
    description:
    - The Google Cloud Platform project to use.
    type: str
  auth_kind:
    description:
    - The type of credential used.
    type: str
    required: true
    choices:
    - application
    - machineaccount
    - serviceaccount
  service_account_contents:
    description:
    - The contents of a Service Account JSON file, either in a dictionary or as a
      JSON string that represents it.
    type: jsonarg
  service_account_file:
    description:
    - The path of a Service Account JSON file if serviceaccount is selected as type.
    type: path
  service_account_email:
    description:
    - An optional service account email address if machineaccount is selected and
      the user does not wish to use the default email.
    type: str
  scopes:
    description:
    - Array of scopes to be used.
    type: list
  env_type:
    description:
    - Specifies which Ansible environment you're running this module within.
    - This should not be set unless you know what you're doing.
    - This only alters the User Agent string for any API requests.
    type: str
notes:
- For authentication, you can set service_account_file using the C(GCP_SERVICE_ACCOUNT_FILE)
  env variable.
- For authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
  env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
  env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variable values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''

EXAMPLES = '''
- name: get info on a dataset
  gcp_bigquery_dataset_info:
    project: test_project
    auth_kind: serviceaccount
    service_account_file: "/tmp/auth.pem"
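
# A minimal sketch, not part of the generated docs: like other *_info modules,
# this one only gathers data, so the usual pattern is to register the result
# and read its `resources` list afterwards. The `dataset_info` variable name
# is arbitrary.
- name: get info on a dataset and show the dataset IDs found
  gcp_bigquery_dataset_info:
    project: test_project
    auth_kind: serviceaccount
    service_account_file: "/tmp/auth.pem"
  register: dataset_info

- name: show dataset IDs
  debug:
    msg: "{{ dataset_info.resources | map(attribute='datasetReference.datasetId') | list }}"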
'''

RETURN = '''
resources:
  description: List of resources
  returned: always
  type: complex
  contains:
    name:
      description:
      - Dataset name.
      returned: success
      type: str
    access:
      description:
      - An array of objects that define dataset access for one or more entities.
      returned: success
      type: complex
      contains:
        domain:
          description:
          - A domain to grant access to. Any users signed in with the domain specified
            will be granted the specified access.
          returned: success
          type: str
        groupByEmail:
          description:
          - An email address of a Google Group to grant access to.
          returned: success
          type: str
        role:
          description:
          - Describes the rights granted to the user specified by the other member
            of the access object. Primitive, predefined, and custom roles are supported.
            Predefined roles that have equivalent primitive roles are swapped by the
            API to their primitive counterparts, and will show a diff post-create.
            See [official docs](U(https://cloud.google.com/bigquery/docs/access-control)).
          returned: success
          type: str
        specialGroup:
          description:
          - A special group to grant access to.
          - 'Possible values include: * `projectOwners`: Owners of the enclosing project.'
          - "* `projectReaders`: Readers of the enclosing project."
          - "* `projectWriters`: Writers of the enclosing project."
          - "* `allAuthenticatedUsers`: All authenticated BigQuery users. ."
          returned: success
          type: str
        userByEmail:
          description:
          - 'An email address of a user to grant access to. For example: fred@example.com.'
          returned: success
          type: str
        view:
          description:
          - A view from a different dataset to grant access to. Queries executed against
            that view will have read access to tables in this dataset. The role field
            is not required when this field is set. If that view is updated by any
            user, access to the view needs to be granted again via an update operation.
          returned: success
          type: complex
          contains:
            datasetId:
              description:
              - The ID of the dataset containing this table.
              returned: success
              type: str
            projectId:
              description:
              - The ID of the project containing this table.
              returned: success
              type: str
            tableId:
              description:
              - The ID of the table. The ID must contain only letters (a-z, A-Z),
                numbers (0-9), or underscores. The maximum length is 1,024 characters.
              returned: success
              type: str
    creationTime:
      description:
      - The time when this dataset was created, in milliseconds since the epoch.
      returned: success
      type: int
    datasetReference:
      description:
      - A reference that identifies the dataset.
      returned: success
      type: complex
      contains:
        datasetId:
          description:
          - A unique ID for this dataset, without the project name. The ID must contain
            only letters (a-z, A-Z), numbers (0-9), or underscores. The maximum length
            is 1,024 characters.
          returned: success
          type: str
        projectId:
          description:
          - The ID of the project containing this dataset.
          returned: success
          type: str
    defaultTableExpirationMs:
      description:
      - The default lifetime of all tables in the dataset, in milliseconds.
      - The minimum value is 3600000 milliseconds (one hour).
      - Once this property is set, all newly-created tables in the dataset will have
        an `expirationTime` property set to the creation time plus the value in this
        property, and changing the value will only affect new tables, not existing
        ones. When the `expirationTime` for a given table is reached, that table will
        be deleted automatically.
      - If a table's `expirationTime` is modified or removed before the table expires,
        or if you provide an explicit `expirationTime` when creating a table, that
        value takes precedence over the default expiration time indicated by this
        property.
      returned: success
      type: int
    defaultPartitionExpirationMs:
      description:
      - The default partition expiration for all partitioned tables in the dataset,
        in milliseconds.
      - Once this property is set, all newly-created partitioned tables in the dataset
        will have an `expirationMs` property in the `timePartitioning` settings set
        to this value, and changing the value will only affect new tables, not existing
        ones. The storage in a partition will have an expiration time of its partition
        time plus this value.
      - 'Setting this property overrides the use of `defaultTableExpirationMs` for
        partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs`
        will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs`
        when creating or updating a partitioned table, that value takes precedence
        over the default partition expiration time indicated by this property.'
      returned: success
      type: int
    description:
      description:
      - A user-friendly description of the dataset.
      returned: success
      type: str
    etag:
      description:
      - A hash of the resource.
      returned: success
      type: str
    friendlyName:
      description:
      - A descriptive name for the dataset.
      returned: success
      type: str
    id:
      description:
      - The fully-qualified unique name of the dataset in the format projectId:datasetId.
        The dataset name without the project name is given in the datasetId field.
      returned: success
      type: str
    labels:
      description:
      - The labels associated with this dataset. You can use these to organize and
        group your datasets.
      returned: success
      type: dict
    lastModifiedTime:
      description:
      - The date when this dataset or any of its tables was last modified, in milliseconds
        since the epoch.
      returned: success
      type: int
    location:
      description:
      - The geographic location where the dataset should reside.
      - See [official docs](U(https://cloud.google.com/bigquery/docs/dataset-locations)).
      - There are two types of locations, regional or multi-regional. A regional location
        is a specific geographic place, such as Tokyo, and a multi-regional location
        is a large geographic area, such as the United States, that contains at least
        two geographic places.
      - 'Possible regional values include: `asia-east1`, `asia-northeast1`, `asia-southeast1`,
        `australia-southeast1`, `europe-north1`, `europe-west2` and `us-east4`.'
      - 'Possible multi-regional values: `EU` and `US`.'
      - The default value is multi-regional location `US`.
      - Changing this forces a new resource to be created.
      returned: success
      type: str
    defaultEncryptionConfiguration:
      description:
      - The default encryption key for all tables in the dataset. Once this property
        is set, all newly-created partitioned tables in the dataset will have their
        encryption key set to this value, unless the table creation request (or query)
        overrides the key.
      returned: success
      type: complex
      contains:
        kmsKeyName:
          description:
          - Describes the Cloud KMS encryption key that will be used to protect destination
            BigQuery table. The BigQuery Service Account associated with your project
            requires access to this encryption key.
          returned: success
          type: str
'''

################################################################################
# Imports
################################################################################
import json

from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule

################################################################################
# Main
################################################################################


def main():
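    # Info modules take no module-specific arguments; GcpModule injects the
    # shared project/auth options documented above.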
    module = GcpModule(argument_spec=dict())

    # Fall back to the BigQuery scope when the playbook supplies none.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/bigquery']

    return_value = {'resources': fetch_list(module, collection(module))}
    module.exit_json(**return_value)


def collection(module):
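    # Build the BigQuery datasets.list URL for the configured project.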
    return "https://www.googleapis.com/bigquery/v2/projects/{project}/datasets".format(**module.params)


def fetch_list(module, link):
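    # GcpSession.list follows nextPageToken pagination and concatenates the
    # 'datasets' array from each page into the returned list.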
    auth = GcpSession(module, 'bigquery')
    return auth.list(link, return_if_object, array_name='datasets')


def return_if_object(module, response):
    # If not found, return nothing.
    if response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    # json.decoder.JSONDecodeError only exists on Python 3; getattr() falls
    # back to ValueError, which json raises on Python 2.
    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    errors = navigate_hash(result, ['error', 'errors'])
    if errors:
        module.fail_json(msg=errors)

    return result


if __name__ == "__main__":
    main()