|
1 | | -import { BigQuery } from '@google-cloud/bigquery' |
| 1 | +import { DNS } from '@google-cloud/dns' |
| 2 | +import bigquery from '@google-cloud/bigquery/build/src/types' |
2 | 3 | import CloudGraph from '@cloudgraph/sdk' |
3 | 4 | import groupBy from 'lodash/groupBy' |
| 5 | +import isEmpty from 'lodash/isEmpty' |
4 | 6 | import gcpLoggerText from '../../properties/logger' |
5 | | -import { GcpServiceInput } from '../../types' |
| 7 | +import { GcpCredentials, GcpServiceInput } from '../../types' |
6 | 8 | import { generateGcpErrorLog, initTestEndpoint } from '../../utils' |
7 | | -import { RawGcpBigQueryDataset } from './types' |
8 | | -import { MULTI_REGIONS } from '../../config/constants' |
| 9 | +import { listData } from '../../utils/fetchUtils' |
9 | 10 |
|
// Shared logger text templates, copied so local mutation can't affect other services
const lt = { ...gcpLoggerText }
const { logger } = CloudGraph
const serviceName = 'BigQuery Dataset'
// NOTE(review): presumably resolves a test-only endpoint override for this service — confirm initTestEndpoint
const apiEndpoint = initTestEndpoint(serviceName)
14 | 15 |
|
15 | | -export default async ({ |
16 | | - regions, |
17 | | - config, |
18 | | -}: GcpServiceInput): Promise<{ |
19 | | - [region: string]: RawGcpBigQueryDataset[] |
20 | | -}> => { |
21 | | - const bigQueryClient = new BigQuery({ ...config, apiEndpoint }) |
22 | | - const datasetsResult: RawGcpBigQueryDataset[] = [] |
23 | | - const { projectId } = config |
24 | | - const allRegions = regions.split(',').concat(MULTI_REGIONS) |
25 | | - try { |
26 | | - const dataSetIter = bigQueryClient.getDatasetsStream() |
27 | | - for await (const dataSetResponse of dataSetIter) { |
28 | | - if (allRegions.includes(dataSetResponse.location)) { |
29 | | - const dsMetaData = dataSetResponse.metadata |
30 | | - const result = { |
31 | | - ...dsMetaData, |
32 | | - region: dataSetResponse.location, |
33 | | - Labels: dataSetResponse.labels, |
34 | | - tables: [], |
35 | | - projectId, |
36 | | - } |
37 | | - try { |
38 | | - const tableIter = dataSetResponse.getTablesStream() |
39 | | - for await (const tableResponse of tableIter) { |
40 | | - result.tables.push(tableResponse.metadata) |
/**
 * BigQuery dataset metadata as returned by the REST API (`bigquery.IDataset`),
 * augmented with the owning project, the dataset's location, and its tables.
 */
export interface RawGcpBigQueryDataset extends bigquery.IDataset {
  projectId: string
  // Dataset `location` value — used downstream as the grouping region key
  region: string
  tables: RawGcpBigQueryTable[]
}
| 21 | + |
/**
 * BigQuery table metadata (`bigquery.ITable`) augmented with the owning
 * project and the parent dataset's region.
 */
export interface RawGcpBigQueryTable extends bigquery.ITable {
  projectId: string
  region: string
}
| 26 | + |
| 27 | +export const listBigQueryDatasets = async ( |
| 28 | + config: GcpCredentials, |
| 29 | + datasetsResult: RawGcpBigQueryDataset[] |
| 30 | +): Promise<void> => |
| 31 | + new Promise(async resolve => { |
| 32 | + const { projectId } = config |
| 33 | + |
| 34 | + try { |
| 35 | + const service = new DNS({ ...config, apiEndpoint }) |
| 36 | + const dataSetlist = await listData({ |
| 37 | + service, |
| 38 | + apiUri: `https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets`, |
| 39 | + dataFieldName: 'datasets', |
| 40 | + }) |
| 41 | + |
| 42 | + for (const { datasetReference } of dataSetlist) { |
| 43 | + const dataSetResponse = await listData({ |
| 44 | + service, |
| 45 | + apiUri: `https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetReference?.datasetId}`, |
| 46 | + }) |
| 47 | + |
| 48 | + if (!isEmpty(dataSetResponse)) { |
| 49 | + const result = { |
| 50 | + ...dataSetResponse[0], |
| 51 | + region: dataSetResponse[0].location, |
| 52 | + tables: [], |
| 53 | + projectId, |
| 54 | + } |
| 55 | + |
| 56 | + const tableResponse = await listData({ |
| 57 | + service, |
| 58 | + apiUri: `https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetReference?.datasetId}/tables`, |
| 59 | + dataFieldName: 'tables', |
| 60 | + }) |
| 61 | + |
| 62 | + for (const table of tableResponse) { |
| 63 | + result.tables.push(table) |
41 | 64 | } |
42 | 65 | datasetsResult.push(result) |
43 | | - } catch (error) { |
44 | | - generateGcpErrorLog(serviceName, 'bigQuery:getTablesStream', error) |
45 | 66 | } |
46 | 67 | } |
| 68 | + } catch (error) { |
| 69 | + generateGcpErrorLog(serviceName, 'bigquery:datasets', error) |
47 | 70 | } |
48 | | - } catch (error) { |
49 | | - generateGcpErrorLog(serviceName, 'bigQuery:getDatasetsStream', error) |
50 | | - } |
| 71 | + resolve() |
| 72 | + }) |
51 | 73 |
|
52 | | - logger.debug(lt.foundResources(serviceName, datasetsResult.length)) |
53 | | - return groupBy(datasetsResult, 'region') |
54 | | -} |
| 74 | +export default async ({ |
| 75 | + config, |
| 76 | +}: GcpServiceInput): Promise<{ |
| 77 | + [region: string]: RawGcpBigQueryDataset[] |
| 78 | +}> => |
| 79 | + new Promise(async resolve => { |
| 80 | + const datasetsResult: RawGcpBigQueryDataset[] = [] |
| 81 | + |
| 82 | + await listBigQueryDatasets(config, datasetsResult) |
| 83 | + |
| 84 | + logger.debug(lt.foundResources(serviceName, datasetsResult.length)) |
| 85 | + resolve(groupBy(datasetsResult, 'region')) |
| 86 | + }) |
0 commit comments