
Commit 3d32042

Merge branch 'datahub-project:master' into master

2 parents a24acb6 + cfd891b

22 files changed: +784 -335 lines


.github/workflows/docker-unified.yml (+277 -304)

Large diffs are not rendered by default.

datahub-frontend/build.gradle (+1 -1)

@@ -76,7 +76,7 @@ task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', distZip]

 docker {
   dependsOn(stageMainDist)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
   files "${buildDir}/stage"
   files fileTree(rootProject.projectDir) {

datahub-upgrade/build.gradle (+1 -1)

@@ -172,7 +172,7 @@ task runNoCode(type: Exec) {

 docker {
   dependsOn(bootJar)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
   files bootJar.outputs.files
   files fileTree(rootProject.projectDir) {

docker/build.gradle (+1)

@@ -257,6 +257,7 @@ quickstart_configs.each { taskName, config ->
       // Only restart containers that had their modules rebuilt
       if (containersToRestart) {
         def cmd = ["docker compose -p datahub --profile ${config.profile}"] + ['-f', compose_base] + ['restart'] + containersToRestart
+        println(cmd.join(" "))
         commandLine 'bash', '-c', cmd.join(" ")
       } else {
         // If no containers need restart, make this a no-op

docker/datahub-ingestion-base/build.gradle (+1 -2)

@@ -17,8 +17,7 @@ ext {

 docker {
   dependsOn build
-  name "${docker_registry}/${docker_repo}:v${docker_version}"
-  //version "v${docker_version}"
+  name "${docker_registry}/${docker_repo}:${docker_version}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
   files fileTree(rootProject.projectDir) {
     include '.dockerignore'

docker/datahub-ingestion/build.gradle (+1 -1)

@@ -22,7 +22,7 @@ dependencies {

 docker {
   dependsOn 'build', ':docker:datahub-ingestion-base:docker', ':metadata-ingestion:codegen'
-  name "${docker_registry}/${docker_repo}:v${docker_version}"
+  name "${docker_registry}/${docker_repo}:${docker_version}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile${docker_target == "slim" ? "-slim-only" : ""}")
   files fileTree(rootProject.projectDir) {
     include '.dockerignore'

docker/elasticsearch-setup/build.gradle (+1 -1)

@@ -12,7 +12,7 @@ ext {

 docker {
   dependsOn(build)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
   files fileTree(rootProject.projectDir) {
     include '.dockerignore'

docker/kafka-setup/build.gradle (+1 -1)

@@ -12,7 +12,7 @@ ext {

 docker {
   dependsOn(build)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
   files fileTree(rootProject.projectDir) {
     include '.dockerignore'

docker/mysql-setup/build.gradle (+1 -1)

@@ -13,7 +13,7 @@ ext {

 docker {
   dependsOn build
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
   files fileTree(rootProject.projectDir) {
     include '.dockerignore'

docker/postgres-setup/build.gradle (+1 -1)

@@ -12,7 +12,7 @@ ext {
 }

 docker {
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
   files fileTree(rootProject.projectDir) {
     include '.dockerignore'

docs-website/sidebars.js (+1)

@@ -367,6 +367,7 @@ module.exports = {
     },
     {
       "DataHub Cloud Release History": [
+        "docs/managed-datahub/release-notes/v_0_3_10",
         "docs/managed-datahub/release-notes/v_0_3_9",
         "docs/managed-datahub/release-notes/v_0_3_8",
         "docs/managed-datahub/release-notes/v_0_3_7",
docs/managed-datahub/release-notes/v_0_3_10.md (new file, +68)

@@ -0,0 +1,68 @@
# v0.3.10
---

Release Availability Date
---
4-April-2025

Recommended CLI/SDK
---
1.0.0.1

Helm Chart
---
1.5.20

## Known Issues

- API Tracing: Some aspects (i.e. `siblings`) which can be generated by system hooks are reported as a trace error if the system aspect and ingestion are creating conflicting aspects.

## Release Changelog
---

### v0.3.10

- All changes in https://github.com/datahub-project/datahub/releases/tag/v1.0.0

- Breaking Changes

- Bug Fixes
  - [Schema Tab] Render rich text in the V1 experience where you can click on links instead of having to click "View more"
  - [Subscriptions] Fix an edge case where we were not showing display names for users or groups properly in subscription summaries
  - [Nav Bar] Fix the styling of the logo in the new nav bar header experience in Safari
  - [Group Membership] Harden the UI to prevent errors when showing group memberships on a user profile page
  - [Structured Properties] Allow users to search for Glossary Nodes when applying asset values to structured properties
  - [Lineage] Hide a few unwanted and unneeded filters in the lineage list view from the UI
  - [Lineage] Remove the per-hop node limit restriction that was causing some results to be hidden
  - [Lineage] Fix an issue where indirect upstream lineage was incomplete for certain entity types like Charts, in both Impact Analysis and the Lineage Graph. This was also causing the Upstream Health sidebar section to underreport upstream assets with health problems.
  - [Queries Tab] Change the page size
  - [Search Filters] Add support for a "Has Siblings" filter (disabled by default, available by request)

- Product

  - **Navigation**: Our new simplified nav bar is now switched on by default in the V2 UI
  - **Custom Asset Actions**: Links created via the Documentation tab can now be elevated to display in page headers and search results. We've also added an `upsertLink` GraphQL endpoint for easier programmatic management of links.
  - **Proposals**: Entity profile pages now display the new proposal types: Ownership, Domains, and Structured Properties.
  - **Proposals**: Our redesigned proposals experience (Proposals 2.0) is now in beta (enabled by request)! Try out the improved interface for viewing proposals you can approve/decline as well as those that you've created.
  - **Proposal Notifications**: You can now configure notifications for when you are assigned to review a proposal, or when a proposal you raised is approved or denied, from **Settings** > **My Notifications**.
  - **Observe**: Introducing on-demand smart assertions in beta (enabled by request)! You can now create freshness, volume, and field metric anomaly detection checks via the UI for any table that's supported by Observe.
  - **Observe**: Smart assertion tuning - improve your anomaly monitors by adjusting the training data lookback window, tightening or loosening the sensitivity, and adding data-point exclusion windows.
  - **Observe**: Incidents V2 - incidents have received a significant uplift, with capabilities to set assignees, severity, stage, and impacted assets, plus the ability to search, group, and filter incidents on the incidents tab.
  - **Remote Executor**: Introduces a new **Executors** tab in the **Manage Data Sources** page for improved management and observability of Remote Executor Pools
  - **Remote Executors**: Create and provision new Executor Pools directly from the UI and optionally set them as default for future Ingestion Source configurations. Easily view the set of deployed executors, their status, configured Ingestion Sources, and currently running tasks.
  - **Compliance Forms**: Support deep linking to a specific Compliance Form completion experience inside the Compliance Center.
  - **Search**: Support searching for deprecated assets via a new Deprecated filter. Also, show deprecation in the Lineage sidebar as an upstream health warning.

- Platform
  - **OpenAPI**: API Tracing improvements (Experimental)
  - **GraphQL**: Sorting added to `scrollAcrossEntities`
  - **Performance**: Improved access policy performance when using nested domains and containers
  - **OpenAPI**: Operation endpoints created to fetch raw ES documents and view Kafka lag on MCP and MCL topics
  - **OpenAPI**: Additional validation for URNs and entity/aspect names when using async endpoints
  - **OpenAPI**: Fixed timeseries aspect ingestion bug #12912
  - **Restore Indices Job**: Support added for read-only replicas

- Ingestion
  - **MLflow**: Support dataset ↔ run lineage with customizable platform mapping for datasets.
  - **OpenAPI**: Ingestion can use OpenAPI instead of Rest.li; set environment variable `DATAHUB_REST_SINK_DEFAULT_ENDPOINT=OPENAPI`
  - **API Tracing**: Ingestion can trace each async write by tracking the write to primary and search storage; set environment variable `DATAHUB_REST_TRACE_MODE=ENABLED`
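
The two ingestion environment variables called out above can be set before launching any ingestion run. A minimal sketch follows; the recipe filename and the use of subprocess to invoke the CLI are illustrative assumptions, not part of the release notes:

import os
import subprocess

# Route DataHub REST sink writes through OpenAPI instead of Rest.li (per the release notes above).
os.environ["DATAHUB_REST_SINK_DEFAULT_ENDPOINT"] = "OPENAPI"
# Enable API tracing so each async write is tracked to primary and search storage.
os.environ["DATAHUB_REST_TRACE_MODE"] = "ENABLED"

# Hypothetical recipe path; any standard recipe passed to `datahub ingest` behaves the same way.
subprocess.run(["datahub", "ingest", "-c", "recipe.yml"], check=True)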

gradle/versioning/versioning-global.gradle (+8 -1)

@@ -23,10 +23,14 @@ def detailedVersionString = "0.0.0-unknown-SNAPSHOT"
 def cliMajorVersion = "0.15.0" // base default cli major version
 def snapshotVersion = false
 def javaVersion = ""
+// Tag for docker images. The "v" prefix is used in the tag only if the tag is computed from a version; if a releaseVersion is supplied, it is used as is.
+// This enables PR tags to be used without the "v" prefix. This variance was previously handled in the CI steps that build images without using Gradle.
+def versionTag = ""

 if (project.hasProperty("releaseVersion")) {
   version = releaseVersion
   detailedVersionString = releaseVersion
+  versionTag = releaseVersion
 } else {
   try {
     // apply this plugin in a try-catch block so that we can handle cases without .git directory
@@ -35,6 +39,7 @@
     detailedVersionString = gitVersion()
     version = details.lastTag
     version = version.startsWith("v")? version.substring(1): version
+    versionTag = "v" + version
     def suffix = details.isCleanTag? "": "-SNAPSHOT"
     snapshotVersion = ! details.isCleanTag
   }
@@ -78,14 +83,16 @@
     // we are unable to part the last token as an integer, so we just append SNAPSHOT to this version
     javaVersion = versionParts[0..versionParts.size()-1].join('.') + '-SNAPSHOT'
   }
+  versionTag = "v" + version
 }

 // Note: No task, we want this executed during config phase, once for rootProject.
 def data = [
   fullVersion: detailedVersionString,
   cliMajorVersion: cliMajorVersion,
   version: version,
-  javaVersion: javaVersion
+  javaVersion: javaVersion,
+  versionTag: versionTag
 ]

 // Convert to JSON
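
The tagging rule described in the new comment can be summarized as follows. This is an illustrative Python sketch of the behaviour, not code from the commit, and the PR-style tag value is a made-up example:

def docker_version_tag(release_version=None, git_tag=None):
    # Mirrors the rule in the comment above: a supplied releaseVersion is used as-is,
    # while a version derived from a git tag gets the "v" prefix applied.
    if release_version is not None:
        return release_version
    version = git_tag[1:] if git_tag.startswith("v") else git_tag
    return "v" + version


# A PR build supplying releaseVersion=pr12345 (hypothetical value) tags images as "pr12345";
# a git tag "v1.0.0" yields version "1.0.0" and the image tag "v1.0.0".
assert docker_version_tag(release_version="pr12345") == "pr12345"
assert docker_version_tag(git_tag="v1.0.0") == "v1.0.0"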

gradle/versioning/versioning.gradle (+5 -1)

@@ -4,9 +4,12 @@ import org.apache.tools.ant.filters.ReplaceTokens

 def detailedVersionString = "0.0.0-unknown-SNAPSHOT"
 def cliMajorVersion = "0.15.0" // base default cli major version
-
 def inputFile = file("${rootProject.buildDir}/version.json")

+ext {
+  versionTag = "v${detailedVersionString}"
+}
+
 task readJsonData {
   if (inputFile.exists()) {
     def jsonSlurper = new JsonSlurper()
@@ -15,6 +18,7 @@ task readJsonData {
     detailedVersionString = data.fullVersion
     cliMajorVersion = data.cliMajorVersion
     version = data.version
+    versionTag = data.versionTag
   } else {
     println "git.properties JSON file not found: ${inputFile.path}"
   }

metadata-jobs/mae-consumer-job/build.gradle (+1 -1)

@@ -47,7 +47,7 @@ bootJar {

 docker {
   dependsOn(bootJar)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   //version "v${version}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
   files bootJar.outputs.files

metadata-jobs/mce-consumer-job/build.gradle (+1 -1)

@@ -58,7 +58,7 @@ bootJar {

 docker {
   dependsOn(bootJar)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   //version "v${version}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
   files bootJar.outputs.files

metadata-service/war/build.gradle (+2 -2)

@@ -86,7 +86,7 @@ bootRun {

 docker {
   dependsOn bootJar
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
   dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
   files bootJar.outputs.files
   files fileTree(rootProject.projectDir) {
@@ -123,4 +123,4 @@ test {
     "com.linkedin.gms.ServletConfig",
     "com.linkedin.gms.GMSApplication"]
 }
-}
+}

smoke-test/conftest.py (+39 -3)

@@ -49,6 +49,43 @@ def pytest_sessionfinish(session, exitstatus):
     send_message(exitstatus)


+def bin_pack_tasks(tasks, n_buckets):
+    """
+    Bin-pack tasks into n_buckets with roughly equal weights.
+
+    Parameters:
+        tasks (list): List of (task, weight) tuples. If only task is provided, weight defaults to 1.
+        n_buckets (int): Number of buckets to distribute tasks into.
+
+    Returns:
+        list: List of buckets, where each bucket is a list of tasks.
+    """
+    # Normalize the tasks to ensure they're all (task, weight) tuples
+    normalized_tasks = []
+    for task in tasks:
+        if isinstance(task, tuple) and len(task) == 2:
+            normalized_tasks.append(task)
+        else:
+            normalized_tasks.append((task, 1))
+
+    # Sort tasks by weight in descending order
+    sorted_tasks = sorted(normalized_tasks, key=lambda x: x[1], reverse=True)
+
+    # Initialize the buckets with zero weight
+    buckets: List = [[] for _ in range(n_buckets)]
+    bucket_weights: List[int] = [0] * n_buckets
+
+    # Assign each task to the bucket with the lowest current weight
+    for task, weight in sorted_tasks:
+        # Find the bucket with the minimum weight
+        min_bucket_idx = bucket_weights.index(min(bucket_weights))
+
+        # Add the task to this bucket
+        buckets[min_bucket_idx].append(task)
+        bucket_weights[min_bucket_idx] += weight
+
+    return buckets
+
 def get_batch_start_end(num_tests: int) -> Tuple[int, int]:
     batch_count_env = os.getenv("BATCH_COUNT", 1)
     batch_count = int(batch_count_env)
@@ -71,16 +108,15 @@ def get_batch_start_end(num_tests: int) -> Tuple[int, int]:
     batch_end = batch_start + batch_size
     # We must have exactly as many batches as specified by BATCH_COUNT.
     if (
-        num_tests - batch_end < batch_size
-    ):  # We must have exactly as many batches as specified by BATCH_COUNT, put the remaining in the last batch.
+        batch_number == batch_count - 1  # this is the last batch
+    ):  # If this is the last batch, put any remaining tests in it.
         batch_end = num_tests

     if batch_count > 0:
         print(f"Running tests for batch {batch_number} of {batch_count}")

     return batch_start, batch_end

-
 def pytest_collection_modifyitems(
     session: pytest.Session, config: pytest.Config, items: List[Item]
 ) -> None:
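
The new bin_pack_tasks helper is a simple greedy packer: tasks are sorted by weight, heaviest first, and each one is dropped into the currently lightest bucket. A small usage sketch follows; the task names and weights are invented for illustration, and the import assumes the function is reachable from smoke-test/conftest.py:

from conftest import bin_pack_tasks  # assumes running inside smoke-test/ with its dependencies available

# Bare items default to weight 1; tuples carry explicit weights.
tasks = [("cypress_suite_a", 10), ("cypress_suite_b", 7), "test_lineage", "test_policies", ("test_ingest", 4)]

buckets = bin_pack_tasks(tasks, n_buckets=2)
# Expected greedy result:
#   bucket 0 -> ["cypress_suite_a", "test_lineage", "test_policies"]  (total weight 12)
#   bucket 1 -> ["cypress_suite_b", "test_ingest"]                    (total weight 11)
print(buckets)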

smoke-test/smoke.sh (+5 -3)

@@ -36,11 +36,13 @@ source ./set-cypress-creds.sh
 # set environment variables for the test
 source ./set-test-env-vars.sh

+echo "TEST_STRATEGY: $TEST_STRATEGY, BATCH_COUNT: $BATCH_COUNT, BATCH_NUMBER: $BATCH_NUMBER"
+
 # TEST_STRATEGY:
 # if set to pytests, runs all pytests, skips cypress tests(though cypress test launch is via a pytest).
 # if set tp cypress, runs all cypress tests
 # if blank, runs all.
-# When invoked via the github action, BATCH_COUNT and BATCH_NUM env vars are set to run a slice of those tests per
+# When invoked via the github action, BATCH_COUNT and BATCH_NUMBER env vars are set to run a slice of those tests per
 # worker for parallelism. docker-unified.yml generates a test matrix of pytests/cypress in batches. As number of tests
 # increase, the batch_count config (in docker-unified.yml) may need adjustment.
 if [[ "${TEST_STRATEGY}" == "pytests" ]]; then
@@ -51,7 +53,7 @@ elif [[ "${TEST_STRATEGY}" == "cypress" ]]; then
   # github workflow test matrix will invoke this in multiple runners for each batch.
   # Skipping the junit at the pytest level since cypress itself generates junits on a per-test basis. The pytest is a single test for all cypress
   # tests and isnt very helpful.
-  pytest -rP --durations=20 -vv --continue-on-collection-errors tests/cypress/integration_test.py
+  pytest -rP --durations=20 -vvs --continue-on-collection-errors tests/cypress/integration_test.py
 else
-  pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke-all.xml
+  pytest -rP --durations=20 -vvs --continue-on-collection-errors --junit-xml=junit.smoke-all.xml
 fi
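
For context, the BATCH_COUNT / BATCH_NUMBER slicing these comments refer to works roughly as sketched below; the even floor-division batch size is an assumption, since only the last-batch handling is visible in the conftest.py diff above:

def batch_slice(num_tests, batch_count, batch_number):
    # Assumed even split of tests across runners; the conftest.py change guarantees
    # that the last batch absorbs any remainder so every test lands in exactly one batch.
    batch_size = num_tests // batch_count
    batch_start = batch_size * batch_number
    batch_end = batch_start + batch_size
    if batch_number == batch_count - 1:  # this is the last batch
        batch_end = num_tests
    return batch_start, batch_end


# 10 tests across 3 runners: slices of 3, 3, and 4 (the last batch takes the remainder).
print([batch_slice(10, 3, i) for i in range(3)])  # [(0, 3), (3, 6), (6, 10)]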
