@@ -81,8 +81,8 @@ static void resetSparkLabelsCache() {
 
   private static Map<String, String> computeSparkLabels(ImmutableMap<String, String> conf) {
     Map<String, String> sparkLabels = new HashMap<>();
-    getSparkAppId(conf).ifPresent(p -> sparkLabels.put("appId", p));
-    getSparkAppName(conf).ifPresent(p -> sparkLabels.put("appName", p));
+    getSparkAppId(conf).ifPresent(p -> sparkLabels.put("app-id", p));
+    getSparkAppName(conf).ifPresent(p -> sparkLabels.put("app-name", p));
     if (isDataprocRuntime()) {
       sparkLabels.putAll(getGCPLabels(conf));
     }
@@ -92,7 +92,7 @@ private static Map<String, String> computeSparkLabels(ImmutableMap<String, Strin
   static Map<String, String> getGCPLabels(ImmutableMap<String, String> conf) {
     try (CloseableHttpClient httpClient = createHttpClient()) {
       Map<String, String> gcpLabels = getResourceLabels(conf, httpClient);
-      getGCPProjectId(conf, httpClient).ifPresent(p -> gcpLabels.put("projectId", p));
+      getGCPProjectId(conf, httpClient).ifPresent(p -> gcpLabels.put("project-id", p));
       getDataprocRegion(conf, httpClient).ifPresent(p -> gcpLabels.put("region", p));
       return gcpLabels;
     } catch (IOException e) {
@@ -105,27 +105,27 @@ private static Map<String, String> getResourceLabels(
       ImmutableMap<String, String> conf, CloseableHttpClient httpClient) {
     Map<String, String> resourceLabels = new HashMap<>();
     if ("yarn".equals(conf.getOrDefault(SPARK_MASTER, ""))) {
-      getClusterName(conf).ifPresent(p -> resourceLabels.put("cluster.name", p));
-      getClusterUUID(conf, httpClient).ifPresent(p -> resourceLabels.put("cluster.uuid", p));
-      getDataprocJobID(conf).ifPresent(p -> resourceLabels.put("job.id", p));
-      getDataprocJobUUID(conf).ifPresent(p -> resourceLabels.put("job.uuid", p));
-      resourceLabels.put("job.type", "dataproc_job");
+      getClusterName(conf).ifPresent(p -> resourceLabels.put("cluster-name", p));
+      getClusterUUID(conf, httpClient).ifPresent(p -> resourceLabels.put("cluster-uuid", p));
+      getDataprocJobID(conf).ifPresent(p -> resourceLabels.put("job-id", p));
+      getDataprocJobUUID(conf).ifPresent(p -> resourceLabels.put("job-uuid", p));
+      resourceLabels.put("job-type", "dataproc_job");
       return resourceLabels;
     }
     Optional<String> dataprocBatchID = getDataprocBatchID(conf, httpClient);
     if (dataprocBatchID.isPresent()) {
-      dataprocBatchID.ifPresent(p -> resourceLabels.put("spark.batch.id", p));
+      dataprocBatchID.ifPresent(p -> resourceLabels.put("spark-batch-id", p));
       getDataprocBatchUUID(conf, httpClient)
-          .ifPresent(p -> resourceLabels.put("spark.batch.uuid", p));
-      resourceLabels.put("job.type", "batch");
+          .ifPresent(p -> resourceLabels.put("spark-batch-uuid", p));
+      resourceLabels.put("job-type", "batch");
       return resourceLabels;
     }
     Optional<String> dataprocSessionID = getDataprocSessionID(conf, httpClient);
     if (dataprocSessionID.isPresent()) {
-      dataprocSessionID.ifPresent(p -> resourceLabels.put("spark.session.id", p));
+      dataprocSessionID.ifPresent(p -> resourceLabels.put("spark-session-id", p));
       getDataprocSessionUUID(conf, httpClient)
-          .ifPresent(p -> resourceLabels.put("spark.session.uuid", p));
-      resourceLabels.put("job.type", "session");
+          .ifPresent(p -> resourceLabels.put("spark-session-uuid", p));
+      resourceLabels.put("job-type", "session");
       return resourceLabels;
     }
     return resourceLabels;
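For reference, a minimal sketch of the label map this change would produce for a Dataproc batch workload. The key names are the ones introduced in the diff above; the class name and all values are placeholders, not part of the change.

```java
// Hypothetical example: the class name and every value below are placeholders.
// Only the key names come from the diff (camelCase/dotted keys replaced by hyphenated ones).
import java.util.LinkedHashMap;
import java.util.Map;

public class HyphenatedLabelKeysExample {
  public static void main(String[] args) {
    Map<String, String> labels = new LinkedHashMap<>();
    labels.put("app-id", "app-20240101000000-0000");       // was "appId"
    labels.put("app-name", "example-app");                  // was "appName"
    labels.put("project-id", "example-project");            // was "projectId"
    labels.put("region", "us-central1");                    // key unchanged
    labels.put("spark-batch-id", "example-batch");          // was "spark.batch.id"
    labels.put("spark-batch-uuid", "00000000-0000-0000-0000-000000000000"); // was "spark.batch.uuid"
    labels.put("job-type", "batch");                         // was "job.type"
    labels.forEach((k, v) -> System.out.println(k + "=" + v));
  }
}
```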