Skip to content

Commit 5cf4683

Browse files
james-zhou-inspire11 and tyler-dunkel
authored and committed
feat(dataproc): add connection cluster to jobs
1 parent a50fe18 commit 5cf4683

7 files changed

Lines changed: 59 additions & 8 deletions

File tree

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
import isEmpty from 'lodash/isEmpty'
2+
import { ServiceConnection } from '@cloudgraph/sdk'
3+
import { RawGcpDataprocCluster } from './data'
4+
import services from '../../enums/services'
5+
6+
export default ({
7+
service,
8+
data,
9+
region,
10+
}: {
11+
service: RawGcpDataprocCluster
12+
data: { name: string; data: { [property: string]: any[] } }[]
13+
region: string
14+
}): {
15+
[property: string]: ServiceConnection[]
16+
} => {
17+
const { id } = service
18+
const connections: ServiceConnection[] = []
19+
20+
/**
21+
* Find Dataproc Jobs
22+
*/
23+
const jobs: {
24+
name: string
25+
data: { [property: string]: any[] }
26+
} = data.find(({ name }) => name === services.dataprocJob)
27+
28+
if (jobs?.data?.[region]) {
29+
30+
const filtered = jobs.data[region].filter(
31+
({ placement }) => placement.clusterUuid === id
32+
)
33+
if (!isEmpty(filtered)) {
34+
for (const { id } of filtered) {
35+
connections.push({
36+
id,
37+
resourceType: services.dataprocJob,
38+
relation: 'child',
39+
field: 'dataprocJobs',
40+
})
41+
}
42+
}
43+
}
44+
45+
const result = {
46+
[id]: connections,
47+
}
48+
return result
49+
}

src/services/dataprocCluster/data.ts

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import cuid from 'cuid'
21
import { ClusterControllerClient } from '@google-cloud/dataproc'
32
import CloudGraph from '@cloudgraph/sdk'
43
import groupBy from 'lodash/groupBy'
@@ -11,7 +10,7 @@ const lt = { ...gcpLoggerText }
1110
const { logger } = CloudGraph
1211
const serviceName = 'Dataproc Cluster'
1312

14-
export interface RawGcpDataprocCluster extends Omit<google.cloud.dataproc.v1.ICluster, 'projectId' | 'labels'> {
13+
export interface RawGcpDataprocCluster extends Omit<google.cloud.dataproc.v1.ICluster, 'projectId' | 'clusterUuid' | 'labels'> {
1514
id: string
1615
region: string
1716
projectId: string
@@ -40,11 +39,11 @@ export default async ({
4039
})
4140

4241
const iterable = dataprocClient.listClustersAsync({ projectId, region })
43-
for await (const { labels, ...response } of iterable) {
42+
for await (const { clusterUuid, labels, ...response } of iterable) {
4443
if (response) {
4544
clusterList.push({
4645
...response,
47-
id: cuid(),
46+
id: clusterUuid,
4847
projectId,
4948
region,
5049
Labels: labels,

src/services/dataprocCluster/format.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -238,7 +238,6 @@ export default ({
238238
Labels,
239239
status = {},
240240
statusHistory = [],
241-
clusterUuid,
242241
metrics = {},
243242
} = service
244243

@@ -251,7 +250,6 @@ export default ({
251250
labels: formatLabelsFromMap(Labels),
252251
status: formatClusterStatus(status),
253252
statusHistory: statusHistory?.map(history => formatClusterStatus(history)),
254-
clusterUuid,
255253
hdfsMetrics: formatKeyValueMap(metrics?.hdfsMetrics || {}),
256254
yarnMetrics: formatKeyValueMap(metrics?.yarnMetrics || {}),
257255
}

src/services/dataprocCluster/index.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,15 @@ import {Service} from '@cloudgraph/sdk'
22
import BaseService from '../base'
33
import format from './format'
44
import getData from './data'
5+
import getConnections from './connections'
56
import mutation from './mutation'
67

78
export default class GcpDataprocCluster extends BaseService implements Service {
89
format = format.bind(this)
910

1011
getData = getData.bind(this)
1112

13+
getConnections = getConnections.bind(this)
14+
1215
mutation = mutation
1316
}

src/services/dataprocCluster/schema.graphql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -194,8 +194,8 @@ type gcpDataprocCluster implements gcpBaseResource
194194
labels: [gcpRawLabel]
195195
status: gcpDataprocClusterStatus
196196
statusHistory: [gcpDataprocClusterStatus]
197-
clusterUuid: String @search(by: [hash, regexp])
198197
hdfsMetrics: [gcpDataprocClusterMetric]
199198
yarnMetrics: [gcpDataprocClusterMetric]
199+
dataprocJobs: [gcpDataprocJob] @hasInverse(field: dataprocClusters)
200200
project: [gcpProject] @hasInverse(field: dataprocClusters)
201201
}

src/services/dataprocJob/schema.graphql

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -173,5 +173,6 @@ type gcpDataprocJob implements gcpBaseResource
173173
schedulingMaxFailuresTotal: Int @search
174174
done: Boolean @search
175175
labels: [gcpRawLabel]
176+
dataprocClusters: [gcpDataprocCluster] @hasInverse(field: dataprocJobs)
176177
project: [gcpProject] @hasInverse(field: dataprocJobs)
177178
}

src/types/generated.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -876,8 +876,8 @@ export type GcpDataprocAutoscalingPolicy = GcpBaseResource & {
876876
};
877877

878878
export type GcpDataprocCluster = GcpBaseResource & {
879-
clusterUuid?: Maybe<Scalars['String']>;
880879
config?: Maybe<GcpDataprocClusterConfig>;
880+
dataprocJobs?: Maybe<Array<Maybe<GcpDataprocJob>>>;
881881
hdfsMetrics?: Maybe<Array<Maybe<GcpDataprocClusterMetric>>>;
882882
labels?: Maybe<Array<Maybe<GcpRawLabel>>>;
883883
project?: Maybe<Array<Maybe<GcpProject>>>;
@@ -1029,6 +1029,7 @@ export type GcpDataprocInstanceClusterConfigAccelerator = {
10291029
};
10301030

10311031
export type GcpDataprocJob = GcpBaseResource & {
1032+
dataprocClusters?: Maybe<Array<Maybe<GcpDataprocCluster>>>;
10321033
done?: Maybe<Scalars['Boolean']>;
10331034
driverControlFilesUri?: Maybe<Scalars['String']>;
10341035
driverOutputResourceUri?: Maybe<Scalars['String']>;

0 commit comments

Comments
 (0)