diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/AsyncImportApiExample.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/AsyncImportApiExample.java
new file mode 100644
index 0000000000..3cfebc7b45
--- /dev/null
+++ b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/AsyncImportApiExample.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.examples.sampleapp;
+
+import org.apache.atlas.AtlasClientV2;
+import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.model.PList;
+import org.apache.atlas.model.impexp.AsyncImportStatus;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.file.Files;
+
+public class AsyncImportApiExample {
+ private final AtlasClientV2 client;
+
+ public AsyncImportApiExample(AtlasClientV2 client) {
+ this.client = client;
+ }
+
+ public AtlasAsyncImportRequest testImportAsyncWithZip() throws Exception {
+ URL url = AsyncImportApiExample.class.getClassLoader().getResource("importFile.zip");
+
+ if (url == null) {
+ System.err.println("importFile.zip not found in classpath.");
+
+ return null;
+ }
+
+ File zipFile = new File(url.toURI());
+ AtlasImportRequest request = new AtlasImportRequest();
+
+ try (InputStream zipStream = Files.newInputStream(zipFile.toPath())) {
+ System.out.println("Testing Async Import with ZIP file...");
+
+ try {
+ AtlasAsyncImportRequest asyncRequest = client.importAsync(request, zipStream);
+
+ System.out.println("Async Import Request Created: " + asyncRequest);
+
+ return asyncRequest;
+ } catch (AtlasServiceException e) {
+ System.err.println("Async Import with ZIP file failed: " + e.getMessage());
+
+ throw e;
+ }
+ } catch (IOException e) {
+ System.err.println("Failed to open ZIP file: " + e.getMessage());
+
+ throw e;
+ }
+ }
+
+ public void testGetAsyncImportStatus() throws Exception {
+ System.out.println("Testing getAllAsyncImportStatus...");
+
+ try {
+ PList statuses = client.getAsyncImportStatus(null, null);
+
+ System.out.println("All Async Import Statuses:");
+ for (AsyncImportStatus status : statuses.getList()) {
+ System.out.println(status);
+ }
+ } catch (AtlasServiceException e) {
+ System.err.println("Failed to fetch all async import statuses: " + e.getMessage());
+
+ throw e;
+ }
+ }
+
+ public void testGetAsyncImportStatusById(String importId) throws Exception {
+ System.out.println("Testing getImportStatus for id=" + importId);
+
+ try {
+ AtlasAsyncImportRequest importStatus = client.getAsyncImportStatusById(importId);
+
+ System.out.println("Import Status for ID (" + importId + "): " + importStatus);
+ } catch (AtlasServiceException e) {
+ System.err.println("Failed to fetch import status for id=" + importId + ": " + e.getMessage());
+
+ throw e;
+ }
+ }
+
+ public void testAbortAsyncImportById(String importId) throws Exception {
+ System.out.println("Testing abortAsyncImport for id=" + importId);
+
+ try {
+ client.abortAsyncImport(importId);
+
+ System.out.println("Successfully aborted async import with ID: " + importId);
+ } catch (AtlasServiceException e) {
+ System.err.println("Failed to abort async import for ID (" + importId + "): " + e.getMessage());
+
+ throw e;
+ }
+ }
+}
diff --git a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java
index 9fbd05a609..7666ab0341 100644
--- a/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java
+++ b/atlas-examples/sample-app/src/main/java/org/apache/atlas/examples/sampleapp/SampleApp.java
@@ -19,6 +19,7 @@
import org.apache.atlas.AtlasClientV2;
import org.apache.atlas.AtlasException;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.utils.AuthenticationUtil;
@@ -75,6 +76,18 @@ public static void main(String[] args) throws Exception {
sampleApp.glossaryExample();
entityExample.deleteEntities();
+
+ // Async Import Examples
+ AsyncImportApiExample asyncImportApiExample = new AsyncImportApiExample(sampleApp.getClient());
+ AtlasAsyncImportRequest asyncRequest = asyncImportApiExample.testImportAsyncWithZip();
+
+ asyncImportApiExample.testGetAsyncImportStatus();
+
+ String testImportId = asyncRequest.getImportId();
+
+ asyncImportApiExample.testGetAsyncImportStatusById(testImportId);
+
+ asyncImportApiExample.testAbortAsyncImportById(testImportId);
} finally {
if (sampleApp != null && sampleApp.getClient() != null) {
sampleApp.getClient().close();
diff --git a/atlas-examples/sample-app/src/main/resources/importFile.zip b/atlas-examples/sample-app/src/main/resources/importFile.zip
new file mode 100644
index 0000000000..8005c8fa92
Binary files /dev/null and b/atlas-examples/sample-app/src/main/resources/importFile.zip differ
diff --git a/client/client-v2/src/main/java/org/apache/atlas/AtlasClientV2.java b/client/client-v2/src/main/java/org/apache/atlas/AtlasClientV2.java
index 1bba1e1792..a485ea329f 100644
--- a/client/client-v2/src/main/java/org/apache/atlas/AtlasClientV2.java
+++ b/client/client-v2/src/main/java/org/apache/atlas/AtlasClientV2.java
@@ -20,14 +20,17 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.annotations.VisibleForTesting;
+import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.core.header.FormDataContentDisposition;
import com.sun.jersey.core.util.MultivaluedMapImpl;
+import com.sun.jersey.multipart.BodyPart;
import com.sun.jersey.multipart.FormDataBodyPart;
import com.sun.jersey.multipart.FormDataMultiPart;
import com.sun.jersey.multipart.MultiPart;
import com.sun.jersey.multipart.file.StreamDataBodyPart;
import org.apache.atlas.bulkimport.BulkImportResponse;
+import org.apache.atlas.model.PList;
import org.apache.atlas.model.SearchFilter;
import org.apache.atlas.model.audit.AtlasAuditEntry;
import org.apache.atlas.model.audit.AuditReductionCriteria;
@@ -43,6 +46,9 @@
import org.apache.atlas.model.glossary.AtlasGlossaryTerm;
import org.apache.atlas.model.glossary.relations.AtlasRelatedCategoryHeader;
import org.apache.atlas.model.glossary.relations.AtlasRelatedTermHeader;
+import org.apache.atlas.model.impexp.AsyncImportStatus;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasClassification.AtlasClassifications;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
@@ -82,6 +88,7 @@
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
+import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.time.Instant;
@@ -140,6 +147,13 @@ public class AtlasClientV2 extends AtlasBaseClient {
//IndexRecovery APIs
private static final String INDEX_RECOVERY_URI = BASE_URI + "v2/indexrecovery";
+ // Async Import APIs
+ private static final String ASYNC_IMPORT_URI = BASE_URI + "admin/async/import";
+ private static final String ASYNC_IMPORT_STATUS_URI = BASE_URI + "admin/async/import/status";
+
+ private static final String IMPORT_REQUEST_PARAMTER = "request";
+ private static final String IMPORT_DATA_PARAMETER = "data";
+
public AtlasClientV2(String[] baseUrl, String[] basicAuthUserNamePassword) {
super(baseUrl, basicAuthUserNamePassword);
}
@@ -1039,6 +1053,38 @@ public API formatPathWithParameter(API api, String... params) {
return formatPathParameters(api, params);
}
+ public AtlasAsyncImportRequest importAsync(AtlasImportRequest request, InputStream stream) throws AtlasServiceException {
+ return performAsyncImport(getImportRequestBodyPart(request), new StreamDataBodyPart(IMPORT_DATA_PARAMETER, stream));
+ }
+
+ /**
+ * Retrieves a list of asynchronous import statuses.
+ * If offset or limit is null, defaults to offset = 0 and limit = 50.
+ *
+ * @param offset Starting index for the result set
+ * @param limit Maximum number of results to return
+ * @return A paginated list of asynchronous import statuses
+ * @throws AtlasServiceException if the request fails
+ */
+ public PList getAsyncImportStatus(Integer offset, Integer limit) throws AtlasServiceException {
+ int actualOffset = (offset != null) ? offset : 0;
+ int actualLimit = (limit != null) ? limit : 50;
+
+ MultivaluedMap queryParams = new MultivaluedMapImpl();
+ queryParams.add("offset", String.valueOf(actualOffset));
+ queryParams.add("limit", String.valueOf(actualLimit));
+
+ return callAPI(API_V2.ASYNC_IMPORT_STATUS, new GenericType>() {}, queryParams);
+ }
+
+ public AtlasAsyncImportRequest getAsyncImportStatusById(String importId) throws AtlasServiceException {
+ return callAPI(formatPathParameters(API_V2.ASYNC_IMPORT_STATUS_BY_ID, importId), AtlasAsyncImportRequest.class, null);
+ }
+
+ public void abortAsyncImport(String importId) throws AtlasServiceException {
+ callAPI(formatPathParameters(API_V2.ABORT_ASYNC_IMPORT_BY_ID, importId), null, null);
+ }
+
@Override
protected API formatPathParameters(API api, String... params) {
return new API(String.format(api.getPath(), params), api.getMethod(), api.getExpectedStatus());
@@ -1152,6 +1198,20 @@ private T getTypeDefByGuid(String guid, Class typeDefClass) throws AtlasS
return callAPI(api, typeDefClass, null);
}
+ private FormDataBodyPart getImportRequestBodyPart(AtlasImportRequest request) {
+ return new FormDataBodyPart(IMPORT_REQUEST_PARAMTER, AtlasType.toJson(request), MediaType.APPLICATION_JSON_TYPE);
+ }
+
+ private AtlasAsyncImportRequest performAsyncImport(BodyPart requestPart, BodyPart filePart) throws AtlasServiceException {
+ try (FormDataMultiPart formDataMultiPart = new FormDataMultiPart()) {
+ MultiPart multipartEntity = formDataMultiPart.bodyPart(requestPart).bodyPart(filePart);
+
+ return callAPI(API_V2.ASYNC_IMPORT, AtlasAsyncImportRequest.class, multipartEntity);
+ } catch (IOException e) {
+ throw new AtlasServiceException(e);
+ }
+ }
+
public static class API_V2 extends API {
// TypeDef APIs
public static final API_V2 GET_TYPEDEF_BY_NAME = new API_V2(TYPEDEF_BY_NAME, HttpMethod.GET, Response.Status.OK);
@@ -1249,6 +1309,12 @@ public static class API_V2 extends API {
public static final API_V2 GET_ATLAS_AUDITS = new API_V2(ATLAS_AUDIT_API, HttpMethod.POST, Response.Status.OK);
public static final API_V2 AGEOUT_ATLAS_AUDITS = new API_V2(ATLAS_AUDIT_API + "ageout/", HttpMethod.POST, Response.Status.OK);
+ // Async Import APIs
+ public static final API_V2 ASYNC_IMPORT = new API_V2(ASYNC_IMPORT_URI, HttpMethod.POST, Response.Status.OK, MediaType.MULTIPART_FORM_DATA, MediaType.APPLICATION_JSON);
+ public static final API_V2 ASYNC_IMPORT_STATUS = new API_V2(ASYNC_IMPORT_STATUS_URI, HttpMethod.GET, Response.Status.OK);
+ public static final API_V2 ASYNC_IMPORT_STATUS_BY_ID = new API_V2(ASYNC_IMPORT_STATUS_URI + "/%s", HttpMethod.GET, Response.Status.OK);
+ public static final API_V2 ABORT_ASYNC_IMPORT_BY_ID = new API_V2(ASYNC_IMPORT_URI + "/%s", HttpMethod.DELETE, Response.Status.NO_CONTENT);
+
// Glossary APIs
public static final API_V2 GET_ALL_GLOSSARIES = new API_V2(GLOSSARY_URI, HttpMethod.GET, Response.Status.OK);
public static final API_V2 GET_GLOSSARY_BY_GUID = new API_V2(GLOSSARY_URI + "/%s", HttpMethod.GET, Response.Status.OK);
diff --git a/common/src/main/java/org/apache/atlas/repository/Constants.java b/common/src/main/java/org/apache/atlas/repository/Constants.java
index 3addbcd0d1..dcc3f123e9 100644
--- a/common/src/main/java/org/apache/atlas/repository/Constants.java
+++ b/common/src/main/java/org/apache/atlas/repository/Constants.java
@@ -229,6 +229,14 @@ public final class Constants {
public static final String PROPERTY_KEY_GUIDS_TO_AGEOUT_BY_CUSTOM = encodePropertyKey(AUDIT_REDUCTION_PREFIX + "custom");
public static final String PROPERTY_KEY_GUIDS_TO_SWEEPOUT = encodePropertyKey(AUDIT_REDUCTION_PREFIX + "sweepout");
+ /**
+ * Atlas Async Import vertex property keys.
+ */
+ public static final String ATLAS_ASYNC_IMPORT_PREFIX = INTERNAL_PROPERTY_KEY_PREFIX + "AtlasAsyncImportRequest.";
+ public static final String PROPERTY_KEY_RECEIVED_TIME = encodePropertyKey(ATLAS_ASYNC_IMPORT_PREFIX + "receivedTime");
+ public static final String PROPERTY_KEY_ASYNC_IMPORT_STATUS = encodePropertyKey(ATLAS_ASYNC_IMPORT_PREFIX + "status");
+ public static final String PROPERTY_KEY_ASYNC_IMPORT_ID = encodePropertyKey(ATLAS_ASYNC_IMPORT_PREFIX + "importId");
+
public static final String SQOOP_SOURCE = "sqoop";
public static final String FALCON_SOURCE = "falcon";
public static final String HBASE_SOURCE = "hbase";
diff --git a/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java b/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java
index 1b6fbd74be..ff871b77ef 100644
--- a/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java
+++ b/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java
@@ -111,7 +111,10 @@ public enum AtlasConfiguration {
ATLAS_AUDIT_DEFAULT_AGEOUT_IGNORE_TTL("atlas.audit.default.ageout.ignore.ttl", false),
ATLAS_AUDIT_AGING_TTL_TEST_AUTOMATION("atlas.audit.aging.ttl.test.automation", false), //Only for test automation
RELATIONSHIP_SEARCH_ENABLED("atlas.relationship.search.enabled", false),
- UI_TASKS_TAB_USE_ENABLED("atlas.tasks.ui.tab.enabled", false);
+ UI_TASKS_TAB_USE_ENABLED("atlas.tasks.ui.tab.enabled", false),
+ ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION("atlas.async.import.min.duration.override.test.automation", false),
+ ASYNC_IMPORT_TOPIC_PREFIX("atlas.async.import.topic.prefix", "ATLAS_IMPORT_"),
+ ASYNC_IMPORT_REQUEST_ID_PREFIX("atlas.async.import.request_id.prefix", "async_import_");
private static final Configuration APPLICATION_PROPERTIES;
diff --git a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
index 521d19c01c..9256d0f886 100644
--- a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
+++ b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
@@ -208,6 +208,7 @@ public enum AtlasErrorCode {
FILE_NAME_NOT_FOUND(404, "ATLAS-404-00-014", "File name should not be blank"),
NO_TYPE_NAME_ON_VERTEX(404, "ATLAS-404-00-015", "No typename found for given entity with guid: {0}"),
NO_LINEAGE_CONSTRAINTS_FOR_GUID(404, "ATLAS-404-00-016", "No lineage constraints found for requested entity with guid : {0}"),
+ IMPORT_NOT_FOUND(404, "ATLAS-404-00-017", "Import id {0} is not found"),
METHOD_NOT_ALLOWED(405, "ATLAS-405-00-001", "Error 405 - The request method {0} is inappropriate for the URL: {1}"),
@@ -226,6 +227,7 @@ public enum AtlasErrorCode {
GLOSSARY_IMPORT_FAILED(409, "ATLAS-409-00-011", "Glossary import failed"),
METRICSSTAT_ALREADY_EXISTS(409, "ATLAS-409-00-012", "Metric Statistics already collected at {0}"),
PENDING_TASKS_ALREADY_IN_PROGRESS(409, "ATLAS-409-00-013", "There are already {0} pending tasks in queue"),
+ IMPORT_ABORT_NOT_ALLOWED(409, "ATLAS-409-00-016", "Import id {0} is currently in state {1}, cannot be aborted"),
// All internal errors go here
INTERNAL_ERROR(500, "ATLAS-500-00-001", "Internal server error {0}"),
@@ -250,7 +252,12 @@ public enum AtlasErrorCode {
FAILED_TO_UPLOAD(500, "ATLAS-500-00-015", "Error occurred while uploading the file: {0}"),
FAILED_TO_CREATE_GLOSSARY_TERM(500, "ATLAS-500-00-016", "Error occurred while creating glossary term: {0}"),
FAILED_TO_UPDATE_GLOSSARY_TERM(500, "ATLAS-500-00-017", "Error occurred while updating glossary term: {0}"),
- NOTIFICATION_EXCEPTION(500, "ATLAS-500-00-018", "{0}");
+ NOTIFICATION_EXCEPTION(500, "ATLAS-500-00-018", "{0}"),
+ IMPORT_UPDATE_FAILED(500, "ATLAS-500-00-019", "Failed to update import with id={0}"),
+ IMPORT_REGISTRATION_FAILED(500, "ATLAS-500-00-020", "Failed to register import request"),
+ IMPORT_FAILED(500, "ATLAS-500-00-021", "Import with id {0} failed"),
+ ABORT_IMPORT_FAILED(500, "ATLAS-500-00-022", "Failed to abort import with id {0}"),
+ IMPORT_QUEUEING_FAILED(500, "ATLAS-500-00-023", "Failed to add import with id {0} to request queue, please try again later");
private static final Logger LOG = LoggerFactory.getLogger(AtlasErrorCode.class);
private final String errorCode;
diff --git a/intg/src/main/java/org/apache/atlas/model/impexp/AsyncImportStatus.java b/intg/src/main/java/org/apache/atlas/model/impexp/AsyncImportStatus.java
new file mode 100644
index 0000000000..a1c5bf47d6
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/impexp/AsyncImportStatus.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.model.impexp;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus;
+
+import java.io.Serializable;
+
+import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
+import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+@JsonInclude(JsonInclude.Include.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class AsyncImportStatus implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String importId;
+ private ImportStatus status;
+ private String importRequestReceivedTime;
+ private String importRequestUser;
+
+ public AsyncImportStatus() {}
+
+ public AsyncImportStatus(String importId, ImportStatus status, String importRequestReceivedTime, String importRequestUser) {
+ this.importId = importId;
+ this.status = status;
+ this.importRequestReceivedTime = importRequestReceivedTime;
+ this.importRequestUser = importRequestUser;
+ }
+
+ public String getImportId() {
+ return importId;
+ }
+
+ public ImportStatus getStatus() {
+ return status;
+ }
+
+ public String getImportRequestReceivedTime() {
+ return importRequestReceivedTime;
+ }
+
+ public String getImportRequestUser() {
+ return importRequestUser;
+ }
+
+ @Override
+ public String toString() {
+ return "AsyncImportStatus{" +
+ "importId='" + importId + '\'' +
+ ", status='" + status + '\'' +
+ ", importRequestReceivedTime='" + importRequestReceivedTime + '\'' +
+ ", importRequestUser='" + importRequestUser + '\'' +
+ '}';
+ }
+}
diff --git a/intg/src/main/java/org/apache/atlas/model/impexp/AtlasAsyncImportRequest.java b/intg/src/main/java/org/apache/atlas/model/impexp/AtlasAsyncImportRequest.java
new file mode 100644
index 0000000000..589aa8dadd
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/impexp/AtlasAsyncImportRequest.java
@@ -0,0 +1,406 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.model.impexp;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import org.apache.atlas.AtlasConfiguration;
+import org.apache.atlas.model.AtlasBaseModelObject;
+import org.apache.atlas.utils.AtlasEntityUtil;
+
+import java.io.Serializable;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.TimeZone;
+
+import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
+import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+@JsonInclude(JsonInclude.Include.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class AtlasAsyncImportRequest extends AtlasBaseModelObject implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ public enum ImportStatus {
+ STAGING("STAGING"),
+ WAITING("WAITING"),
+ PROCESSING("PROCESSING"),
+ SUCCESSFUL("SUCCESSFUL"),
+ PARTIAL_SUCCESS("PARTIAL_SUCCESS"),
+ ABORTED("ABORTED"),
+ FAILED("FAILED");
+
+ private final String status;
+
+ ImportStatus(String status) {
+ this.status = status;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ @Override
+ public String toString() {
+ return status;
+ }
+ }
+
+ private String importId;
+ private ImportStatus status;
+ private ImportDetails importDetails;
+ private long receivedTime;
+ private long stagedTime;
+ private long processingStartTime;
+ private long completedTime;
+ private AtlasImportResult importResult;
+
+ @JsonIgnore
+ private ImportTrackingInfo importTrackingInfo;
+
+ public AtlasAsyncImportRequest() {}
+
+ public AtlasAsyncImportRequest(String guid) {
+ setGuid(guid);
+ }
+
+ public AtlasAsyncImportRequest(AtlasImportResult result) {
+ this.importResult = result;
+ this.status = ImportStatus.STAGING;
+ this.receivedTime = 0L;
+ this.stagedTime = 0L;
+ this.processingStartTime = 0L;
+ this.completedTime = 0L;
+ this.importDetails = new ImportDetails();
+ this.importTrackingInfo = new ImportTrackingInfo(null, 0);
+
+ setGuid(getGuid());
+ }
+
+ public String getImportId() {
+ return importId;
+ }
+
+ public void setImportId(String importId) {
+ this.importId = importId;
+
+ if (importTrackingInfo != null) {
+ importTrackingInfo.setRequestId(AtlasConfiguration.ASYNC_IMPORT_REQUEST_ID_PREFIX.getString() + importId + "@" + AtlasEntityUtil.getMetadataNamespace());
+ }
+ }
+
+ public ImportStatus getStatus() {
+ return status;
+ }
+
+ public void setStatus(ImportStatus status) {
+ this.status = status;
+ }
+
+ public ImportDetails getImportDetails() {
+ return importDetails;
+ }
+
+ public void setImportDetails(ImportDetails importDetails) {
+ this.importDetails = importDetails;
+ }
+
+ public long getReceivedTime() {
+ return receivedTime;
+ }
+
+ public void setReceivedTime(long receivedTime) {
+ this.receivedTime = receivedTime;
+ }
+
+ public long getStagedTime() {
+ return stagedTime;
+ }
+
+ public void setStagedTime(long stagedTime) {
+ this.stagedTime = stagedTime;
+ }
+
+ public long getProcessingStartTime() {
+ return processingStartTime;
+ }
+
+ public void setProcessingStartTime(long processingStartTime) {
+ this.processingStartTime = processingStartTime;
+ }
+
+ @JsonIgnore
+ public String getTopicName() {
+ return AtlasConfiguration.ASYNC_IMPORT_TOPIC_PREFIX.getString() + importId;
+ }
+
+ public AtlasImportResult getImportResult() {
+ return importResult;
+ }
+
+ public void setImportResult(AtlasImportResult importResult) {
+ this.importResult = importResult;
+ }
+
+ public long getCompletedTime() {
+ return completedTime;
+ }
+
+ public void setCompletedTime(long completedTime) {
+ this.completedTime = completedTime;
+ }
+
+ public ImportTrackingInfo getImportTrackingInfo() {
+ return importTrackingInfo;
+ }
+
+ public void setImportTrackingInfo(ImportTrackingInfo importTrackingInfo) {
+ this.importTrackingInfo = importTrackingInfo;
+ }
+
+ @JsonIgnore
+ public AsyncImportStatus toImportMinInfo() {
+ return new AsyncImportStatus(this.getImportId(), status, toIsoDate(new Date(this.receivedTime)), importResult.getUserName());
+ }
+
+ private String toIsoDate(Date value) {
+ final TimeZone tz = TimeZone.getTimeZone("UTC");
+ final DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+
+ df.setTimeZone(tz);
+
+ return df.format(value);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ } else if (!(o instanceof AtlasAsyncImportRequest)) {
+ return false;
+ } else if (!super.equals(o)) {
+ return false;
+ }
+
+ AtlasAsyncImportRequest that = (AtlasAsyncImportRequest) o;
+
+ return Objects.equals(importResult, that.importResult) &&
+ Objects.equals(importId, that.importId) &&
+ Objects.equals(status, that.status) &&
+ Objects.equals(importDetails, that.importDetails) &&
+ (importTrackingInfo == null ? that.importTrackingInfo == null : (that.importTrackingInfo != null && Objects.equals(importTrackingInfo.getRequestId(), that.importTrackingInfo.getRequestId()))) &&
+ Objects.equals(receivedTime, that.receivedTime) &&
+ Objects.equals(stagedTime, that.stagedTime) &&
+ Objects.equals(processingStartTime, that.processingStartTime) &&
+ Objects.equals(completedTime, that.completedTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), importResult, importId, status, importDetails,
+ importTrackingInfo == null ? null : importTrackingInfo.getRequestId(), receivedTime, stagedTime, processingStartTime, completedTime);
+ }
+
+ @Override
+ protected StringBuilder toString(StringBuilder sb) {
+ sb.append(", importResult=").append(importResult);
+ sb.append(", requestId=").append(importTrackingInfo == null ? null : importTrackingInfo.getRequestId());
+ sb.append(", importId=").append(importId);
+ sb.append(", status=").append(status);
+ sb.append(", receivedTime=").append(receivedTime);
+ sb.append(", stagedTime=").append(stagedTime);
+ sb.append(", processingStartTime=").append(processingStartTime);
+ sb.append(", completedTime=").append(completedTime);
+ sb.append(", importDetails=").append(importDetails);
+
+ return sb;
+ }
+
+ @JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+ @JsonInclude(JsonInclude.Include.NON_NULL)
+ @JsonIgnoreProperties(ignoreUnknown = true)
+ public static class ImportDetails implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private int publishedEntityCount;
+ private int totalEntitiesCount;
+ private int importedEntitiesCount;
+ private int failedEntitiesCount;
+ private List failedEntities;
+ private float importProgress;
+ private Map failures;
+
+ @JsonIgnore
+ private List creationOrder = new ArrayList<>();
+
+ public ImportDetails() {
+ this.failedEntities = new ArrayList<>();
+ this.failures = new HashMap<>();
+ }
+
+ public int getPublishedEntityCount() {
+ return publishedEntityCount;
+ }
+
+ public void setPublishedEntityCount(int count) {
+ this.publishedEntityCount = count;
+ }
+
+ public int getTotalEntitiesCount() {
+ return totalEntitiesCount;
+ }
+
+ public void setTotalEntitiesCount(int count) {
+ this.totalEntitiesCount = count;
+ }
+
+ public int getImportedEntitiesCount() {
+ return importedEntitiesCount;
+ }
+
+ public void setImportedEntitiesCount(int count) {
+ this.importedEntitiesCount = count;
+ }
+
+ public int getFailedEntitiesCount() {
+ return failedEntitiesCount;
+ }
+
+ public void setFailedEntitiesCount(int count) {
+ this.failedEntitiesCount = count;
+ }
+
+ public float getImportProgress() {
+ return importProgress;
+ }
+
+ public void setImportProgress(float progress) {
+ this.importProgress = progress;
+ }
+
+ public Map getFailures() {
+ return failures;
+ }
+
+ public void addFailure(String guid, String message) {
+ this.failures.put(guid, message);
+ }
+
+ public List getFailedEntities() {
+ return failedEntities;
+ }
+
+ public void setFailedEntities(List failedEntities) {
+ this.failedEntities = failedEntities;
+ }
+
+ public List getCreationOrder() {
+ return creationOrder;
+ }
+
+ public void setCreationOrder(List creationOrder) {
+ this.creationOrder = creationOrder;
+ }
+
+ @Override
+ public String toString() {
+ return "ImportDetails{" +
+ "publishedEntityCount=" + publishedEntityCount +
+ ", totalEntitiesCount=" + totalEntitiesCount +
+ ", importedEntitiesCount=" + importedEntitiesCount +
+ ", failedEntitiesCount=" + failedEntitiesCount +
+ ", importProgress=" + importProgress +
+ ", failures=" + failures +
+ '}';
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ } else if (!(o instanceof ImportDetails)) {
+ return false;
+ }
+
+ ImportDetails that = (ImportDetails) o;
+
+ return publishedEntityCount == that.publishedEntityCount &&
+ totalEntitiesCount == that.totalEntitiesCount &&
+ importedEntitiesCount == that.importedEntitiesCount &&
+ failedEntitiesCount == that.failedEntitiesCount &&
+ Float.compare(that.importProgress, importProgress) == 0 &&
+ Objects.equals(failures, that.failures);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(publishedEntityCount, totalEntitiesCount, importedEntitiesCount, failedEntitiesCount, importProgress, failures);
+ }
+ }
+
+ @JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+ @JsonInclude(JsonInclude.Include.NON_NULL)
+ @JsonIgnoreProperties(ignoreUnknown = true)
+ public static class ImportTrackingInfo implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String requestId;
+ private int startEntityPosition;
+
+ public ImportTrackingInfo() {
+ }
+
+ public ImportTrackingInfo(String requestId, int startEntityPosition) {
+ this.requestId = requestId;
+ this.startEntityPosition = startEntityPosition;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public void setRequestId(String requestId) {
+ this.requestId = requestId;
+ }
+
+ public int getStartEntityPosition() {
+ return startEntityPosition;
+ }
+
+ public void setStartEntityPosition(int startEntityPosition) {
+ this.startEntityPosition = startEntityPosition;
+ }
+
+ @Override
+ public String toString() {
+ return "ImportTrackingInfo{" +
+ "requestId='" + requestId + '\'' +
+ ", startEntityPosition=" + startEntityPosition +
+ '}';
+ }
+ }
+}
diff --git a/intg/src/main/java/org/apache/atlas/model/impexp/AtlasImportResult.java b/intg/src/main/java/org/apache/atlas/model/impexp/AtlasImportResult.java
index 4c1a7159a6..ca689aad29 100644
--- a/intg/src/main/java/org/apache/atlas/model/impexp/AtlasImportResult.java
+++ b/intg/src/main/java/org/apache/atlas/model/impexp/AtlasImportResult.java
@@ -170,6 +170,7 @@ public StringBuilder toString(StringBuilder sb) {
sb.append(", processedEntities=[");
AtlasBaseTypeDef.dumpObjects(processedEntities, sb);
sb.append("]");
+ sb.append(", exportResult={").append(exportResultWithoutData).append("}");
sb.append("}");
return sb;
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java b/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java
index 6256543995..8be4aae0a5 100644
--- a/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java
+++ b/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java
@@ -105,7 +105,8 @@ public StringBuilder toString(StringBuilder sb) {
*/
public enum HookNotificationType {
TYPE_CREATE, TYPE_UPDATE, ENTITY_CREATE, ENTITY_PARTIAL_UPDATE, ENTITY_FULL_UPDATE, ENTITY_DELETE,
- ENTITY_CREATE_V2, ENTITY_PARTIAL_UPDATE_V2, ENTITY_FULL_UPDATE_V2, ENTITY_DELETE_V2
+ ENTITY_CREATE_V2, ENTITY_PARTIAL_UPDATE_V2, ENTITY_FULL_UPDATE_V2, ENTITY_DELETE_V2,
+ IMPORT_TYPES_DEF, IMPORT_ENTITY
}
@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/ImportNotification.java b/intg/src/main/java/org/apache/atlas/model/notification/ImportNotification.java
new file mode 100644
index 0000000000..5c28725c9e
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/ImportNotification.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.model.notification;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import java.io.Serializable;
+
+import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
+import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Class representing atlas import notification, extending HookNotification.
+ */
+@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+@JsonInclude
+@JsonIgnoreProperties(ignoreUnknown = true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class ImportNotification extends HookNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ @JsonProperty
+ private String importId;
+
+ protected ImportNotification() {
+ }
+
+ protected ImportNotification(HookNotificationType type, String user, String importId) {
+ super(type, user);
+
+ this.importId = importId;
+ }
+
+ public String getImportId() {
+ return importId;
+ }
+
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("ImportNotification{");
+ super.toString(sb);
+ sb.append(", type=").append(type);
+ sb.append(", user=").append(user);
+ sb.append(", importId=").append(importId);
+ sb.append("}");
+
+ return sb;
+ }
+
+ /**
+ * Notification for type definitions import
+ */
+ @JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+ @JsonInclude(JsonInclude.Include.NON_NULL)
+ @JsonIgnoreProperties(ignoreUnknown = true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class AtlasTypesDefImportNotification extends ImportNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ @JsonProperty
+ private AtlasTypesDef typesDef;
+
+ public AtlasTypesDefImportNotification() {
+ }
+
+ public AtlasTypesDefImportNotification(String importId, String user, AtlasTypesDef typesDef) {
+ super(HookNotificationType.IMPORT_TYPES_DEF, user, importId);
+
+ this.typesDef = typesDef;
+ }
+
+ public AtlasTypesDef getTypesDef() {
+ return typesDef;
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("AtlasTypesDefImportNotification{");
+ super.toString(sb);
+ sb.append(", typesDef=").append(typesDef);
+ sb.append("}");
+
+ return sb;
+ }
+ }
+
+ /**
+ * Notification for entities import
+ */
+ @JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+ @JsonInclude
+ @JsonIgnoreProperties(ignoreUnknown = true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class AtlasEntityImportNotification extends ImportNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ @JsonProperty
+ private AtlasEntityWithExtInfo entity;
+
+ @JsonProperty
+ private int position;
+
+ public AtlasEntityImportNotification() {
+ }
+
+ public AtlasEntityImportNotification(String importId, String user, AtlasEntityWithExtInfo entity, int position) {
+ super(HookNotificationType.IMPORT_ENTITY, user, importId);
+
+ this.entity = entity;
+ this.position = position;
+ }
+
+ public AtlasEntityWithExtInfo getEntity() {
+ return entity;
+ }
+
+ public int getPosition() {
+ return position;
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("AtlasEntityImportNotification{");
+ super.toString(sb);
+ sb.append(", entity=").append(entity);
+ sb.append(", position=").append(position);
+ sb.append("}");
+
+ return sb;
+ }
+ }
+}
diff --git a/intg/src/main/java/org/apache/atlas/utils/AtlasAsyncImportTestUtil.java b/intg/src/main/java/org/apache/atlas/utils/AtlasAsyncImportTestUtil.java
new file mode 100644
index 0000000000..2432550058
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/utils/AtlasAsyncImportTestUtil.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.utils;
+
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+
+import static org.apache.atlas.AtlasConfiguration.ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION;
+
+public class AtlasAsyncImportTestUtil {
+ public static final String OPTION_KEY_ASYNC_IMPORT_MIN_DURATION_IN_MS = "asyncImportMinDurationInMs";
+
+ private AtlasAsyncImportTestUtil() {
+ // to block instantiation
+ }
+
+ public static long intercept(AtlasAsyncImportRequest asyncImportRequest) {
+ if (ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION.getBoolean()) {
+ AtlasImportResult importResult = asyncImportRequest.getImportResult();
+
+ if (importResult != null) {
+ AtlasImportRequest importRequest = importResult.getRequest();
+
+ if (importRequest != null) {
+ long minImportDurationInMs = Long.parseLong(importRequest.getOptions().getOrDefault(OPTION_KEY_ASYNC_IMPORT_MIN_DURATION_IN_MS, "0"));
+ long waitTimeInMs = minImportDurationInMs - (asyncImportRequest.getCompletedTime() - asyncImportRequest.getReceivedTime());
+
+ if (waitTimeInMs > 0) {
+ try {
+ Thread.sleep(waitTimeInMs);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ return waitTimeInMs;
+ }
+ }
+ }
+
+ return -1;
+ }
+}
diff --git a/intg/src/main/java/org/apache/atlas/utils/AtlasJson.java b/intg/src/main/java/org/apache/atlas/utils/AtlasJson.java
index b31586d00c..a2bf8ef2ec 100644
--- a/intg/src/main/java/org/apache/atlas/utils/AtlasJson.java
+++ b/intg/src/main/java/org/apache/atlas/utils/AtlasJson.java
@@ -43,6 +43,7 @@
import org.apache.atlas.model.notification.HookNotification.EntityPartialUpdateRequestV2;
import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2;
import org.apache.atlas.model.notification.HookNotification.HookNotificationType;
+import org.apache.atlas.model.notification.ImportNotification;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.v1.model.instance.AtlasSystemAttributes;
import org.apache.atlas.v1.model.instance.Id;
@@ -382,6 +383,14 @@ public HookNotification deserialize(JsonParser parser, DeserializationContext co
case ENTITY_DELETE_V2:
ret = mapper.treeToValue(root, EntityDeleteRequestV2.class);
break;
+
+ case IMPORT_TYPES_DEF:
+ ret = mapper.treeToValue(root, ImportNotification.AtlasTypesDefImportNotification.class);
+ break;
+
+ case IMPORT_ENTITY:
+ ret = mapper.treeToValue(root, ImportNotification.AtlasEntityImportNotification.class);
+ break;
}
}
diff --git a/intg/src/test/java/org/apache/atlas/utils/TestAtlasAsyncImportTestUtil.java b/intg/src/test/java/org/apache/atlas/utils/TestAtlasAsyncImportTestUtil.java
new file mode 100644
index 0000000000..21904be789
--- /dev/null
+++ b/intg/src/test/java/org/apache/atlas/utils/TestAtlasAsyncImportTestUtil.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.utils;
+
+import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.commons.configuration.Configuration;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import static org.apache.atlas.AtlasConfiguration.ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION;
+import static org.apache.atlas.utils.AtlasAsyncImportTestUtil.OPTION_KEY_ASYNC_IMPORT_MIN_DURATION_IN_MS;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+public class TestAtlasAsyncImportTestUtil {
+ private Configuration conf;
+
+ @BeforeClass
+ public void setup() throws Exception {
+ conf = ApplicationProperties.get();
+ }
+
+ @Test
+ public void testInterceptWaitsForRemainingTimeWhenOverrideEnabled() {
+ // Given
+ conf.setProperty(ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION.getPropertyName(), true);
+
+ AtlasImportRequest importRequest = new AtlasImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+
+ importRequest.setOption(OPTION_KEY_ASYNC_IMPORT_MIN_DURATION_IN_MS, "3000");
+ importResult.setRequest(importRequest);
+
+ AtlasAsyncImportRequest asyncRequest = new AtlasAsyncImportRequest(importResult);
+
+ // Explicitly simulate timing scenario without dependency on actual clock
+ long simulatedReceivedTime = 10000L; // Arbitrary, stable value
+ long simulatedCompletedTime = 11000L; // Simulate completion after 1000ms (1 sec)
+
+ asyncRequest.setReceivedTime(simulatedReceivedTime);
+ asyncRequest.setCompletedTime(simulatedCompletedTime);
+
+ long expectedWaitTime = 2000L; // Min duration (3000ms) - elapsed (1000ms)
+
+ long waitTimeInMs = AtlasAsyncImportTestUtil.intercept(asyncRequest);
+
+ assertEquals(waitTimeInMs, expectedWaitTime, "Should wait exactly 2000ms");
+ }
+
+ @Test
+ public void testInterceptSkipsSleepWhenDurationAlreadyMet() {
+ // Given
+ conf.setProperty(ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION.getPropertyName(), true);
+
+ AtlasImportRequest importRequest = new AtlasImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+
+ importRequest.setOption(OPTION_KEY_ASYNC_IMPORT_MIN_DURATION_IN_MS, "3000");
+ importResult.setRequest(importRequest);
+
+ AtlasAsyncImportRequest asyncRequest = new AtlasAsyncImportRequest(importResult);
+
+ // Explicit fixed timestamps to ensure stability
+ long simulatedReceivedTime = 10000L; // arbitrary fixed start timestamp
+ long simulatedCompletedTime = 14000L; // completed after 4000ms, exceeding the 3000ms min duration
+
+ asyncRequest.setReceivedTime(simulatedReceivedTime);
+ asyncRequest.setCompletedTime(simulatedCompletedTime);
+
+ long waitTimeInMs = AtlasAsyncImportTestUtil.intercept(asyncRequest);
+
+ // Then
+ assertTrue(waitTimeInMs < 0, "Should not sleep as duration already exceeded");
+ }
+
+ @Test
+ public void testInterceptSkipsSleepWhenOverrideIsDisabled() {
+ // Given
+ conf.setProperty(ATLAS_ASYNC_IMPORT_MIN_DURATION_OVERRIDE_TEST_AUTOMATION.getPropertyName(), false);
+
+ AtlasImportRequest importRequest = new AtlasImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+
+ importRequest.setOption(OPTION_KEY_ASYNC_IMPORT_MIN_DURATION_IN_MS, "3000");
+ importResult.setRequest(importRequest);
+
+ AtlasAsyncImportRequest asyncRequest = new AtlasAsyncImportRequest(importResult);
+
+ long waitTimeInMs = AtlasAsyncImportTestUtil.intercept(asyncRequest);
+
+ // Then
+ // Ensure intercept() bailed out early: it returns -1 without sleeping when the override is disabled
+ assertEquals(waitTimeInMs, -1, "intercept() should not sleep when override is disabled");
+ }
+}
diff --git a/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java b/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
index 9c1537f2c1..239691caac 100644
--- a/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
+++ b/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
@@ -33,6 +33,7 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Set;
/**
* Kafka specific notification consumer.
@@ -58,6 +59,16 @@ public AtlasKafkaConsumer(AtlasNotificationMessageDeserializer deserializer,
this.pollTimeoutMilliSeconds = pollTimeoutMilliSeconds;
}
+ @Override
+ public Set getTopicPartition() {
+ return kafkaConsumer != null ? kafkaConsumer.assignment() : null;
+ }
+
+ @Override
+ public Set subscription() {
+ return kafkaConsumer != null ? kafkaConsumer.subscription() : null;
+ }
+
@Override
public void commit(TopicPartition partition, long offset) {
if (!autoCommitEnabled) {
diff --git a/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java b/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
index 61e3053e48..aea87a75a2 100644
--- a/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
+++ b/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
@@ -20,7 +20,9 @@
import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasConfiguration;
+import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
+import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.notification.AbstractNotification;
import org.apache.atlas.notification.NotificationConsumer;
import org.apache.atlas.notification.NotificationException;
@@ -29,6 +31,7 @@
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationConverter;
import org.apache.commons.lang.StringUtils;
+import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
@@ -49,6 +52,7 @@
import java.util.List;
import java.util.Map;
import java.util.Properties;
+import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
@@ -77,8 +81,8 @@ public class KafkaNotification extends AbstractNotification implements Service {
private static final String[] ATLAS_ENTITIES_CONSUMER_TOPICS = AtlasConfiguration.NOTIFICATION_ENTITIES_CONSUMER_TOPIC_NAMES.getStringArray(ATLAS_ENTITIES_TOPIC);
private static final String DEFAULT_CONSUMER_CLOSED_ERROR_MESSAGE = "This consumer has already been closed.";
- private static final Map PRODUCER_TOPIC_MAP = new HashMap<>();
- private static final Map CONSUMER_TOPICS_MAP = new HashMap<>();
+ private static final Map PRODUCER_TOPIC_MAP = new HashMap<>();
+ private static final Map> CONSUMER_TOPICS_MAP = new HashMap<>();
private final Properties properties;
private final Long pollTimeOutMs;
@@ -149,7 +153,7 @@ protected KafkaNotification(Properties properties) {
LOG.info("<== KafkaNotification()");
}
- public static String[] trimAndPurge(String[] strings) {
+ public static List trimAndPurge(String[] strings) {
List ret = new ArrayList<>();
if (strings != null) {
@@ -162,7 +166,7 @@ public static String[] trimAndPurge(String[] strings) {
}
}
- return ret.toArray(new String[ret.size()]);
+ return ret;
}
@Override
@@ -205,6 +209,24 @@ public void close() {
LOG.info("<== KafkaNotification.close()");
}
+ @Override
+ public void closeConsumer(NotificationType notificationTypeToClose, String topic) {
+ this.consumers.computeIfPresent(notificationTypeToClose, (notificationType, notificationConsumers) -> {
+ notificationConsumers.removeIf(consumer -> {
+ if (consumer.subscription().contains(topic)) {
+ consumer.unsubscribe();
+ consumer.close();
+
+ return true;
+ }
+
+ return false;
+ });
+
+ return notificationConsumers.isEmpty() ? null : notificationConsumers;
+ });
+ }
+
// ----- NotificationInterface -------------------------------------------
public boolean isReady(NotificationType notificationType) {
try {
@@ -224,16 +246,16 @@ public boolean isReady(NotificationType notificationType) {
public List> createConsumers(NotificationType notificationType, int numConsumers, boolean autoCommitEnabled) {
LOG.info("==> KafkaNotification.createConsumers(notificationType={}, numConsumers={}, autoCommitEnabled={})", notificationType, numConsumers, autoCommitEnabled);
- String[] topics = CONSUMER_TOPICS_MAP.get(notificationType);
+ List topics = CONSUMER_TOPICS_MAP.getOrDefault(notificationType, Collections.emptyList());
- if (numConsumers < topics.length) {
- LOG.warn("consumers count {} is fewer than number of topics {}. Creating {} consumers, so that consumer count is equal to number of topics.", numConsumers, topics.length, topics.length);
+ if (numConsumers < topics.size()) {
+ LOG.warn("consumers count {} is fewer than number of topics {}. Creating {} consumers, so that consumer count is equal to number of topics.", numConsumers, topics.size(), topics.size());
- numConsumers = topics.length;
- } else if (numConsumers > topics.length) {
- LOG.warn("consumers count {} is higher than number of topics {}. Creating {} consumers, so that consumer count is equal to number of topics", numConsumers, topics.length, topics.length);
+ numConsumers = topics.size();
+ } else if (numConsumers > topics.size()) {
+ LOG.warn("consumers count {} is higher than number of topics {}. Creating {} consumers, so that consumer count is equal to number of topics", numConsumers, topics.size(), topics.size());
- numConsumers = topics.length;
+ numConsumers = topics.size();
}
List notificationConsumers = this.consumers.get(notificationType);
@@ -281,9 +303,7 @@ public void sendInternal(String topic, List messages, boolean isSortNeed
}
public void sendInternal(String topic, List messages) throws NotificationException {
- KafkaProducer producer = getOrCreateProducer(topic);
-
- sendInternalToProducer(producer, topic, messages);
+ sendInternal(topic, messages, false);
}
// ----- AbstractNotification --------------------------------------------
@@ -301,7 +321,7 @@ public Properties getConsumerProperties(NotificationType notificationType) {
String groupId = properties.getProperty(notificationType.toString().toLowerCase() + "." + CONSUMER_GROUP_ID_PROPERTY);
if (StringUtils.isEmpty(groupId)) {
- groupId = "atlas";
+ groupId = notificationType.equals(NotificationType.ASYNC_IMPORT) ? "atlas-import" : "atlas";
}
if (StringUtils.isEmpty(groupId)) {
@@ -322,8 +342,8 @@ public KafkaConsumer getOrCreateKafkaConsumer(KafkaConsumer existingConsumer, Pr
try {
if (ret == null || !isKafkaConsumerOpen(ret)) {
- String[] topics = CONSUMER_TOPICS_MAP.get(notificationType);
- String topic = topics[idxConsumer % topics.length];
+ List topics = CONSUMER_TOPICS_MAP.getOrDefault(notificationType, Collections.emptyList());
+ String topic = topics.get(idxConsumer % topics.size());
LOG.debug("Creating new KafkaConsumer for topic : {}, index : {}", topic, idxConsumer);
@@ -429,6 +449,49 @@ private KafkaProducer getOrCreateProducerByCriteria(Object producerCriteria, Map
return ret;
}
+ @Override
+ public void addTopicToNotificationType(NotificationType notificationType, String topic) throws AtlasBaseException {
+ try (AdminClient adminClient = AdminClient.create(this.properties)) {
+ // checking if a topic exists with the name before adding to consumers.
+ if (adminClient.listTopics().names().get().contains(topic)) {
+ CONSUMER_TOPICS_MAP.computeIfAbsent(notificationType, k -> new ArrayList<>()).add(topic);
+ return;
+ }
+ LOG.error("Failed to add consumer for notificationType={}, topic={}: topic does not exist", notificationType, topic);
+ throw new AtlasBaseException(AtlasErrorCode.INVALID_TOPIC_NAME);
+ } catch (ExecutionException | InterruptedException e) {
+ if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } // restore interrupt status before propagating
+ LOG.error("Failed to add consumer for notificationType={}, topic={}", notificationType, topic, e);
+ throw new AtlasBaseException(AtlasErrorCode.INVALID_TOPIC_NAME, e);
+ }
+ }
+
+ @Override
+ public void closeProducer(NotificationType notificationType, String topic) {
+ producersByTopic.computeIfPresent(topic, (key, producer) -> {
+ // Close the KafkaProducer before removal
+ producer.close();
+
+ // Returning null removes the key from the map
+ return null;
+ });
+
+ PRODUCER_TOPIC_MAP.remove(notificationType, topic);
+ }
+
+ @Override
+ public void deleteTopic(NotificationType notificationType, String topicName) {
+ try (AdminClient adminClient = AdminClient.create(this.properties)) {
+ adminClient.deleteTopics(Collections.singleton(topicName));
+ }
+
+ CONSUMER_TOPICS_MAP.computeIfPresent(notificationType, (key, topics) -> {
+ topics.remove(topicName);
+
+ return topics.isEmpty() ? null : topics;
+ });
+ }
+
// kafka-client doesn't have method to check if consumer is open, hence checking list topics and catching exception
private boolean isKafkaConsumerOpen(KafkaConsumer consumer) {
boolean ret = true;
diff --git a/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java b/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
index fab7fc0801..c7d3df46b4 100644
--- a/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
+++ b/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
@@ -176,6 +176,16 @@ public void init(String source, Object failedMessagesLogger) {
*/
public abstract void sendInternal(NotificationType type, List messages) throws NotificationException;
+ /**
+ * Send the given messages to given topic.
+ *
+ * @param topic the kafka topic
+ * @param messages the array of messages to send
+ *
+ * @throws NotificationException if an error occurs while sending
+ */
+ public abstract void sendInternal(String topic, List messages) throws NotificationException;
+
private static String getHostAddress() {
if (StringUtils.isEmpty(localHostAddress)) {
try {
@@ -224,6 +234,17 @@ public void send(NotificationType type, List messages, MessageSource sour
sendInternal(type, strMessages);
}
+ @Override
+ public void send(String topic, List messages, MessageSource source) throws NotificationException {
+ List strMessages = new ArrayList<>(messages.size());
+
+ for (T message : messages) {
+ createNotificationMessages(message, strMessages, source);
+ }
+
+ sendInternal(topic, strMessages);
+ }
+
private static String getNextMessageId() {
String nextMsgIdPrefix = msgIdPrefix;
int nextMsgIdSuffix = msgIdSuffix.getAndIncrement();
diff --git a/notification/src/main/java/org/apache/atlas/notification/NotificationConsumer.java b/notification/src/main/java/org/apache/atlas/notification/NotificationConsumer.java
index 7f5f5586bf..2348b322bd 100644
--- a/notification/src/main/java/org/apache/atlas/notification/NotificationConsumer.java
+++ b/notification/src/main/java/org/apache/atlas/notification/NotificationConsumer.java
@@ -22,6 +22,7 @@
import java.util.List;
import java.util.Map;
+import java.util.Set;
/**
* Atlas notification consumer. This consumer blocks until a notification can be read.
@@ -68,4 +69,8 @@ public interface NotificationConsumer {
* @return List containing kafka message and partitionId and offset.
*/
List> receiveRawRecordsWithCheckedCommit(Map lastCommittedPartitionOffset);
+
+ Set getTopicPartition();
+
+ Set subscription();
}
diff --git a/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java b/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
index 5bf5ddbfbd..bcb58f2839 100644
--- a/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
+++ b/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
@@ -17,6 +17,7 @@
*/
package org.apache.atlas.notification;
+import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.notification.MessageSource;
import org.apache.atlas.notification.entity.EntityMessageDeserializer;
import org.apache.atlas.notification.hook.HookMessageDeserializer;
@@ -101,6 +102,47 @@ public interface NotificationInterface {
*/
boolean isReady(NotificationType type);
+ /**
+ * Abstract notification wiring for async import messages
+ * @param topic async import topic to publish
+ * @param messages messages to send
+ * @param source source of the message
+ */
+ default void send(String topic, List messages, MessageSource source) throws NotificationException {}
+
+ /**
+ * Associates the specified topic with the given notification type.
+ *
+ * @param notificationType The type of notification to which the topic should be added.
+ * @param topic The name of the topic to be associated with the notification type.
+ */
+ default void addTopicToNotificationType(NotificationType notificationType, String topic) throws AtlasBaseException {}
+
+ /**
+ * Closes the producer associated with the specified notification type and topic.
+ *
+ * @param notificationType The type of notification for which the producer is to be closed.
+ * @param topic The name of the topic associated with the producer.
+ */
+ default void closeProducer(NotificationType notificationType, String topic) {}
+
+ /**
+ * Deletes the specified topic associated with the given notification type.
+ *
+ * @param notificationType The type of notification related to the topic.
+ * @param topicName The name of the topic to be deleted.
+ */
+ default void deleteTopic(NotificationType notificationType, String topicName) {}
+
+ /**
+ * Closes the consumer associated with the specified notification type.
+ *
+ * @param notificationType The type of notification for which the consumer is to be closed.
+ * @param topic The consumer to close with assignment.
+ *
+ */
+ default void closeConsumer(NotificationType notificationType, String topic) {}
+
/**
* Atlas notification types.
*/
@@ -112,7 +154,10 @@ enum NotificationType {
HOOK_UNSORTED(new HookMessageDeserializer()),
// Notifications to entity change consumers.
- ENTITIES(new EntityMessageDeserializer());
+ ENTITIES(new EntityMessageDeserializer()),
+
+ // Notifications from Atlas async importer
+ ASYNC_IMPORT(new HookMessageDeserializer());
private final AtlasNotificationMessageDeserializer deserializer;
diff --git a/notification/src/main/java/org/apache/atlas/notification/rest/RestNotification.java b/notification/src/main/java/org/apache/atlas/notification/rest/RestNotification.java
index 5a35b037c5..f0833b12aa 100644
--- a/notification/src/main/java/org/apache/atlas/notification/rest/RestNotification.java
+++ b/notification/src/main/java/org/apache/atlas/notification/rest/RestNotification.java
@@ -29,6 +29,7 @@
import org.apache.atlas.notification.NotificationException;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.configuration.Configuration;
+import org.apache.commons.lang.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -84,6 +85,11 @@ public void sendInternal(NotificationType type, List messages) throws No
}
}
+ @Override
+ public void sendInternal(String topic, List messages) throws NotificationException {
+ throw new NotImplementedException("sendInternal method is not implemented.");
+ }
+
@Override
public List> createConsumers(NotificationType notificationType, int numConsumers) {
return null;
diff --git a/notification/src/main/java/org/apache/atlas/notification/spool/Spooler.java b/notification/src/main/java/org/apache/atlas/notification/spool/Spooler.java
index df07f0aa19..4c4cf35cca 100644
--- a/notification/src/main/java/org/apache/atlas/notification/spool/Spooler.java
+++ b/notification/src/main/java/org/apache/atlas/notification/spool/Spooler.java
@@ -22,7 +22,9 @@
import org.apache.atlas.model.notification.AtlasNotificationMessage;
import org.apache.atlas.notification.AbstractNotification;
import org.apache.atlas.notification.NotificationConsumer;
+import org.apache.atlas.notification.NotificationException;
import org.apache.atlas.type.AtlasType;
+import org.apache.commons.lang.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -81,6 +83,11 @@ public void sendInternal(NotificationType type, List messages) {
}
}
+ @Override
+ public void sendInternal(String topic, List messages) throws NotificationException {
+ throw new NotImplementedException("sendInternal method is not implemented.");
+ }
+
@VisibleForTesting
boolean write(List messages) {
final boolean ret;
@@ -91,7 +98,7 @@ boolean write(List messages) {
ret = writeInternal(messages);
} else {
- LOG.error("Spooler.write(source={}): called after stop is called! Write will not be performed!", configuration.getSourceName(), messages);
+ LOG.error("Spooler.write(source={}): called after stop is called! {} messages will not be written to spool!", configuration.getSourceName(), (messages != null ? messages.size() : 0));
ret = false;
}
diff --git a/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java b/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
index 23c2b424fa..733bc0f2fe 100644
--- a/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
+++ b/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
@@ -57,8 +57,8 @@
public class KafkaConsumerTest {
private static final String TRAIT_NAME = "MyTrait";
- private static final String ATLAS_HOOK_TOPIC = AtlasConfiguration.NOTIFICATION_HOOK_TOPIC_NAME.getString();
- private static final String[] ATLAS_HOOK_CONSUMER_TOPICS = KafkaNotification.trimAndPurge(AtlasConfiguration.NOTIFICATION_HOOK_CONSUMER_TOPIC_NAMES.getStringArray(ATLAS_HOOK_TOPIC));
+ private static final String ATLAS_HOOK_TOPIC = AtlasConfiguration.NOTIFICATION_HOOK_TOPIC_NAME.getString();
+ private static final List<String> ATLAS_HOOK_CONSUMER_TOPICS = KafkaNotification.trimAndPurge(AtlasConfiguration.NOTIFICATION_HOOK_CONSUMER_TOPIC_NAMES.getStringArray(ATLAS_HOOK_TOPIC));
@Mock
private KafkaConsumer kafkaConsumer;
diff --git a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
index 5bcc234717..8917e9f31a 100644
--- a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
@@ -29,10 +29,12 @@
import org.testng.annotations.Test;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.assertEquals;
@@ -184,6 +186,16 @@ public void close() {
public void wakeup() {
}
+ @Override
+ public Set<TopicPartition> getTopicPartition() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public Set<String> subscription() {
+ return Collections.emptySet();
+ }
+
@Override
public List> receive() {
return receive(1000L);
diff --git a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
index 5d7791471a..93e86256d9 100644
--- a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
@@ -87,6 +87,30 @@ public void testSend2() throws Exception {
}
}
+ @org.testng.annotations.Test
+ public void testSend3() throws Exception {
+ MessageSource source = new MessageSource();
+ Configuration configuration = mock(Configuration.class);
+ TestNotification notification = new TestNotification(configuration);
+ Test message1 = new Test(HookNotificationType.IMPORT_ENTITY, "user1");
+ Test message2 = new Test(HookNotificationType.IMPORT_TYPES_DEF, "user1");
+ String topic = "ATLAS_IMPORT_21334wqdrr";
+ List<Test> messages = Arrays.asList(message1, message2);
+ List<String> messageJson = new ArrayList<>();
+
+ AbstractNotification.createNotificationMessages(message1, messageJson, source);
+ AbstractNotification.createNotificationMessages(message2, messageJson, source);
+
+ notification.send(topic, messages, source);
+
+ assertEquals(notification.messages.size(), messageJson.size());
+ assertEquals(notification.topic, topic);
+
+ for (int i = 0; i < notification.messages.size(); i++) {
+ assertEqualsMessageJson(notification.messages.get(i), messageJson.get(i));
+ }
+ }
+
// ignore msgCreationTime in Json
private void assertEqualsMessageJson(String msgJsonActual, String msgJsonExpected) {
Map msgActual = AtlasType.fromV1Json(msgJsonActual, Map.class);
@@ -106,6 +130,7 @@ public Test(HookNotificationType type, String user) {
public static class TestNotification extends AbstractNotification {
private NotificationType type;
+ private String topic;
 private List<String> messages;
public TestNotification(Configuration applicationProperties) throws AtlasException {
@@ -118,6 +143,12 @@ public void sendInternal(NotificationType notificationType, List notific
messages = notificationMessages;
}
+ @Override
+ public void sendInternal(String notificationTopic, List<String> notificationMessages) throws NotificationException {
+ topic = notificationTopic;
+ messages = notificationMessages;
+ }
+
@Override
 public <T> List<NotificationConsumer<T>> createConsumers(NotificationType notificationType, int numConsumers) {
return null;
diff --git a/notification/src/test/java/org/apache/atlas/notification/spool/AtlasFileSpoolTest.java b/notification/src/test/java/org/apache/atlas/notification/spool/AtlasFileSpoolTest.java
index 37fd165a28..c186f696c0 100644
--- a/notification/src/test/java/org/apache/atlas/notification/spool/AtlasFileSpoolTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/spool/AtlasFileSpoolTest.java
@@ -22,6 +22,7 @@
import org.apache.atlas.notification.NotificationConsumer;
import org.apache.atlas.notification.NotificationException;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang3.RandomUtils;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
@@ -35,6 +36,7 @@
import static org.apache.atlas.notification.NotificationInterface.NotificationType.HOOK;
import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertThrows;
import static org.testng.Assert.assertTrue;
public class AtlasFileSpoolTest extends BaseTest {
@@ -139,6 +141,18 @@ public void concurrentWriteAndPublish() throws InterruptedException, IOException
}
}
+ @Test
+ public void notImplementedSendInternalTest() throws IOException, AtlasException {
+ SpoolConfiguration cfg = getSpoolConfigurationTest();
+ IndexManagement indexManagement = new IndexManagement(cfg);
+
+ indexManagement.init();
+
+ Spooler spooler = new Spooler(cfg, indexManagement);
+
+ assertThrows(UnsupportedOperationException.class, () -> spooler.sendInternal("topic", Collections.emptyList()));
+ }
+
@AfterClass
public void tearDown() {
FileUtils.deleteQuietly(new File(spoolDirTest));
@@ -160,6 +174,11 @@ public void sendInternal(NotificationType type, List messages) {
publishedMessages.addAll(messages);
}
+ @Override
+ public void sendInternal(String topic, List<String> messages) throws NotificationException {
+ throw new NotImplementedException("sendInternal method is not implemented.");
+ }
+
@Override
public void setCurrentUser(String user) {
}
diff --git a/repository/pom.xml b/repository/pom.xml
index 0004b89806..fcadcd1976 100644
--- a/repository/pom.xml
+++ b/repository/pom.xml
@@ -115,7 +115,10 @@
-
+        <dependency>
+            <groupId>org.apache.atlas</groupId>
+            <artifactId>atlas-notification</artifactId>
+        </dependency>
org.apache.atlas
atlas-server-api
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
index 4b517c7631..4512ac63df 100755
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
+++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
@@ -615,6 +615,9 @@ private void initialize(AtlasGraph graph) throws RepositoryException, IndexExcep
createCommonVertexIndex(management, " __AtlasMetricsStat.collectionTime", UniqueKind.GLOBAL_UNIQUE, String.class, SINGLE, true, false);
createCommonVertexIndex(management, " __AtlasMetricsStat.timeToLiveMillis", UniqueKind.NONE, String.class, SINGLE, true, false);
+ // atlas async import request index
+ createCommonVertexIndex(management, "__AtlasAsyncImportRequest.importId", UniqueKind.GLOBAL_UNIQUE, String.class, SINGLE, true, true);
+
// create vertex-centric index
createVertexCentricIndex(management, CLASSIFICATION_LABEL, AtlasEdgeDirection.BOTH, CLASSIFICATION_EDGE_NAME_PROPERTY_KEY, String.class, SINGLE);
createVertexCentricIndex(management, CLASSIFICATION_LABEL, AtlasEdgeDirection.BOTH, CLASSIFICATION_EDGE_IS_PROPAGATED_PROPERTY_KEY, Boolean.class, SINGLE);
diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/AsyncImportService.java b/repository/src/main/java/org/apache/atlas/repository/impexp/AsyncImportService.java
new file mode 100644
index 0000000000..ef747755e4
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/impexp/AsyncImportService.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.SortOrder;
+import org.apache.atlas.annotation.GraphTransaction;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.PList;
+import org.apache.atlas.model.SearchFilter.SortType;
+import org.apache.atlas.model.impexp.AsyncImportStatus;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.repository.ogm.DataAccess;
+import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2;
+import org.apache.commons.collections.CollectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus;
+import static org.apache.atlas.repository.Constants.PROPERTY_KEY_ASYNC_IMPORT_ID;
+import static org.apache.atlas.repository.Constants.PROPERTY_KEY_ASYNC_IMPORT_STATUS;
+import static org.apache.atlas.repository.ogm.impexp.AtlasAsyncImportRequestDTO.ASYNC_IMPORT_TYPE_NAME;
+
+@Service
+public class AsyncImportService {
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncImportService.class);
+
+ private final DataAccess dataAccess;
+
+ @Inject
+ public AsyncImportService(DataAccess dataAccess) {
+ this.dataAccess = dataAccess;
+ }
+
+ public AtlasAsyncImportRequest fetchImportRequestByImportId(String importId) {
+ try {
+ AtlasAsyncImportRequest request = new AtlasAsyncImportRequest();
+
+ request.setImportId(importId);
+
+ return dataAccess.load(request);
+ } catch (Exception e) {
+ LOG.error("Error fetching request with importId: {}", importId, e);
+
+ return null;
+ }
+ }
+
+ public void saveImportRequest(AtlasAsyncImportRequest importRequest) throws AtlasBaseException {
+ try {
+ dataAccess.save(importRequest);
+
+ LOG.debug("Save request ID: {} request: {}", importRequest.getImportId(), importRequest);
+ } catch (AtlasBaseException e) {
+ LOG.error("Failed to save import: {} with request: {}", importRequest.getImportId(), importRequest, e);
+
+ throw e;
+ }
+ }
+
+ public void updateImportRequest(AtlasAsyncImportRequest importRequest) {
+ try {
+ saveImportRequest(importRequest);
+ } catch (AtlasBaseException abe) {
+ LOG.error("Failed to update import: {} with request: {}", importRequest.getImportId(), importRequest, abe);
+ }
+ }
+
+ public List<String> fetchInProgressImportIds() {
+ return AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(ASYNC_IMPORT_TYPE_NAME,
+ Collections.singletonMap(PROPERTY_KEY_ASYNC_IMPORT_STATUS, ImportStatus.PROCESSING),
+ PROPERTY_KEY_ASYNC_IMPORT_ID);
+ }
+
+ public List<String> fetchQueuedImportRequests() {
+ return AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(ASYNC_IMPORT_TYPE_NAME,
+ Collections.singletonMap(PROPERTY_KEY_ASYNC_IMPORT_STATUS, ImportStatus.WAITING),
+ PROPERTY_KEY_ASYNC_IMPORT_ID);
+ }
+
+ public void deleteRequests() {
+ try {
+ dataAccess.delete(AtlasGraphUtilsV2.findEntityGUIDsByType(ASYNC_IMPORT_TYPE_NAME, SortOrder.ASCENDING));
+ } catch (Exception e) {
+ LOG.error("Error deleting import requests", e);
+ }
+ }
+
+ public AtlasAsyncImportRequest abortImport(String importId) throws AtlasBaseException {
+ AtlasAsyncImportRequest importRequestToKill = fetchImportRequestByImportId(importId);
+
+ try {
+ if (importRequestToKill == null) {
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_NOT_FOUND, importId);
+ }
+
+ if (importRequestToKill.getStatus().equals(ImportStatus.STAGING) || importRequestToKill.getStatus().equals(ImportStatus.WAITING)) {
+ importRequestToKill.setStatus(ImportStatus.ABORTED);
+
+ saveImportRequest(importRequestToKill);
+
+ LOG.info("Successfully aborted import request: {}", importId);
+ } else {
+ LOG.error("Cannot abort import request {}: request is in status: {}", importId, importRequestToKill.getStatus());
+
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_ABORT_NOT_ALLOWED, importId, importRequestToKill.getStatus().getStatus());
+ }
+ } catch (AtlasBaseException e) {
+ LOG.error("Failed to abort import request: {}", importId, e);
+
+ throw e;
+ }
+
+ return importRequestToKill;
+ }
+
+ @GraphTransaction
+ public PList<AsyncImportStatus> getAsyncImportsStatus(int offset, int limit) throws AtlasBaseException {
+ LOG.debug("==> AsyncImportService.getAllImports()");
+
+ List<String> allImportGuids = AtlasGraphUtilsV2.findEntityGUIDsByType(ASYNC_IMPORT_TYPE_NAME, SortOrder.ASCENDING);
+
+ List<AsyncImportStatus> requestedPage;
+
+ if (CollectionUtils.isNotEmpty(allImportGuids)) {
+ List<String> paginatedGuids = allImportGuids.stream().skip(offset).limit(limit).collect(Collectors.toList());
+
+ List<AtlasAsyncImportRequest> importsToLoad = paginatedGuids.stream().map(AtlasAsyncImportRequest::new).collect(Collectors.toList());
+ Iterable<AtlasAsyncImportRequest> loadedImports = dataAccess.load(importsToLoad);
+
+ requestedPage = StreamSupport.stream(loadedImports.spliterator(), false).map(AtlasAsyncImportRequest::toImportMinInfo).collect(Collectors.toList());
+ } else {
+ requestedPage = Collections.emptyList();
+ }
+
+ LOG.debug("<== AsyncImportService.getAllImports() : {}", requestedPage);
+
+ return new PList<>(requestedPage, offset, limit, allImportGuids.size(), SortType.NONE, null);
+ }
+
+ @GraphTransaction
+ public AtlasAsyncImportRequest getAsyncImportRequest(String importId) throws AtlasBaseException {
+ LOG.debug("==> AsyncImportService.getImportStatusById(importId={})", importId);
+
+ try {
+ AtlasAsyncImportRequest importRequest = fetchImportRequestByImportId(importId);
+
+ if (importRequest == null) {
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_NOT_FOUND, importId);
+ }
+
+ return importRequest;
+ } finally {
+ LOG.debug("<== AsyncImportService.getImportStatusById(importId={})", importId);
+ }
+ }
+}
diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/AsyncImporter.java b/repository/src/main/java/org/apache/atlas/repository/impexp/AsyncImporter.java
new file mode 100644
index 0000000000..5e995eb678
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/impexp/AsyncImporter.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+
+public interface AsyncImporter {
+ void onImportTypeDef(AtlasTypesDef typesDef, String importId) throws AtlasBaseException;
+
+ Boolean onImportEntity(AtlasEntityWithExtInfo atlasEntityWithExtInfo, String importId, int position) throws AtlasBaseException;
+
+ void onImportComplete(String importId) throws AtlasBaseException;
+
+ void onCompleteImportRequest(String importId);
+}
diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java b/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java
index 96a5e7102e..f8e8b024fe 100644
--- a/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java
+++ b/repository/src/main/java/org/apache/atlas/repository/impexp/ImportService.java
@@ -24,18 +24,30 @@
import org.apache.atlas.entitytransform.BaseEntityHandler;
import org.apache.atlas.entitytransform.TransformerContext;
import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.PList;
+import org.apache.atlas.model.audit.AtlasAuditEntry;
+import org.apache.atlas.model.impexp.AsyncImportStatus;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
import org.apache.atlas.model.impexp.AtlasExportRequest;
import org.apache.atlas.model.impexp.AtlasImportRequest;
import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.repository.audit.AtlasAuditService;
import org.apache.atlas.repository.store.graph.BulkImporter;
+import org.apache.atlas.repository.store.graph.v2.AsyncImportTaskExecutor;
import org.apache.atlas.repository.store.graph.v2.EntityImportStream;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.utils.AtlasAsyncImportTestUtil;
+import org.apache.atlas.utils.AtlasJson;
import org.apache.atlas.utils.AtlasStringUtil;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -48,38 +60,51 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.FAILED;
import static org.apache.atlas.model.impexp.AtlasImportRequest.TRANSFORMERS_KEY;
import static org.apache.atlas.model.impexp.AtlasImportRequest.TRANSFORMS_KEY;
+import static org.apache.atlas.model.impexp.AtlasImportResult.OperationStatus.SUCCESS;
@Component
-public class ImportService {
+public class ImportService implements AsyncImporter {
private static final Logger LOG = LoggerFactory.getLogger(ImportService.class);
private static final String ATLAS_TYPE_HIVE_TABLE = "hive_table";
+ private static final String OPERATION_STATUS = "operationStatus";
- private final AtlasTypeDefStore typeDefStore;
- private final AtlasTypeRegistry typeRegistry;
- private final BulkImporter bulkImporter;
- private final AuditsWriter auditsWriter;
- private final ImportTransformsShaper importTransformsShaper;
+ private final AtlasTypeDefStore typeDefStore;
+ private final AtlasTypeRegistry typeRegistry;
+ private final BulkImporter bulkImporter;
+ private final AuditsWriter auditsWriter;
+ private final ImportTransformsShaper importTransformsShaper;
+ private final AsyncImportTaskExecutor asyncImportTaskExecutor;
+ private final AsyncImportService asyncImportService;
+ private final AtlasAuditService auditService;
private final TableReplicationRequestProcessor tableReplicationRequestProcessor;
- private long startTimestamp;
- private long endTimestamp;
-
@Inject
public ImportService(AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry, BulkImporter bulkImporter,
AuditsWriter auditsWriter, ImportTransformsShaper importTransformsShaper,
- TableReplicationRequestProcessor tableReplicationRequestProcessor) {
+ TableReplicationRequestProcessor tableReplicationRequestProcessor, AsyncImportTaskExecutor asyncImportTaskExecutor,
+ AsyncImportService asyncImportService, AtlasAuditService auditService) {
this.typeDefStore = typeDefStore;
this.typeRegistry = typeRegistry;
this.bulkImporter = bulkImporter;
this.auditsWriter = auditsWriter;
this.importTransformsShaper = importTransformsShaper;
this.tableReplicationRequestProcessor = tableReplicationRequestProcessor;
+ this.asyncImportTaskExecutor = asyncImportTaskExecutor;
+ this.asyncImportService = asyncImportService;
+ this.auditService = auditService;
}
public AtlasImportResult run(InputStream inputStream, String userName, String hostName, String requestingIP) throws AtlasBaseException {
@@ -131,10 +156,51 @@ public AtlasImportResult run(AtlasImportRequest request, String userName, String
return result;
}
+ public AtlasAsyncImportRequest run(AtlasImportRequest request, InputStream inputStream, String userName, String hostName, String requestingIP) throws AtlasBaseException {
+ if (request == null) {
+ request = new AtlasImportRequest();
+ }
+
+ try {
+ LOG.info("==> asyncImport(user={}, from={}, request={})", userName, requestingIP, request);
+
+ EntityImportStream source = createZipSource(inputStream, AtlasConfiguration.IMPORT_TEMP_DIRECTORY.getString());
+ String transforms = MapUtils.isNotEmpty(request.getOptions()) ? request.getOptions().get(TRANSFORMS_KEY) : null;
+
+ setImportTransform(source, transforms);
+
+ String transformers = MapUtils.isNotEmpty(request.getOptions()) ? request.getOptions().get(TRANSFORMERS_KEY) : null;
+
+ setEntityTransformerHandlers(source, transformers);
+
+ AtlasImportResult result = new AtlasImportResult(request, userName, requestingIP, hostName, System.currentTimeMillis());
+
+ result.setExportResult(source.getExportResult());
+
+ return asyncImportTaskExecutor.run(result, source);
+ } finally {
+ LOG.info("<== asyncImport(user={}, from={}, request={})", userName, requestingIP, request);
+ }
+ }
+
+ public PList<AsyncImportStatus> getAsyncImportsStatus(int offset, int limit) throws AtlasBaseException {
+ return asyncImportService.getAsyncImportsStatus(offset, limit);
+ }
+
+ public AtlasAsyncImportRequest getAsyncImportRequest(String importId) throws AtlasBaseException {
+ return asyncImportService.getAsyncImportRequest(importId);
+ }
+
+ public void abortAsyncImport(String importId) throws AtlasBaseException {
+ asyncImportTaskExecutor.abortAsyncImportRequest(importId);
+ }
+
@VisibleForTesting
AtlasImportResult run(EntityImportStream source, AtlasImportRequest request, String userName, String hostName, String requestingIP) throws AtlasBaseException {
AtlasImportResult result = new AtlasImportResult(request, userName, requestingIP, hostName, System.currentTimeMillis());
+ long startTimestamp = System.currentTimeMillis();
+
try {
LOG.info("==> import(user={}, from={}, request={})", userName, requestingIP, request);
@@ -148,8 +214,6 @@ AtlasImportResult run(EntityImportStream source, AtlasImportRequest request, Str
setEntityTransformerHandlers(source, transformers);
- startTimestamp = System.currentTimeMillis();
-
processTypes(source.getTypesDef(), result);
setStartPosition(request, source);
@@ -167,6 +231,10 @@ AtlasImportResult run(EntityImportStream source, AtlasImportRequest request, Str
} finally {
RequestContext.get().setImportInProgress(false);
+ long endTimestamp = System.currentTimeMillis();
+
+ result.incrementMeticsCounter("duration", getDuration(endTimestamp, startTimestamp));
+
if (source != null) {
source.close();
}
@@ -177,6 +245,143 @@ AtlasImportResult run(EntityImportStream source, AtlasImportRequest request, Str
return result;
}
+ @Override
+ public void onImportTypeDef(AtlasTypesDef typesDef, String importId) throws AtlasBaseException {
+ LOG.info("==> onImportTypeDef(typesDef={}, importId={})", typesDef, importId);
+
+ AtlasAsyncImportRequest importRequest = asyncImportService.fetchImportRequestByImportId(importId);
+
+ if (importRequest == null) {
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_NOT_FOUND, importId);
+ }
+
+ AtlasImportResult result = importRequest.getImportResult();
+
+ try {
+ RequestContext.get().setImportInProgress(true);
+
+ processTypes(typesDef, result);
+ } catch (AtlasBaseException abe) {
+ importRequest.setStatus(FAILED);
+
+ throw abe;
+ } finally {
+ RequestContext.get().setImportInProgress(false);
+
+ importRequest.setImportResult(result);
+
+ asyncImportService.updateImportRequest(importRequest);
+
+ LOG.info("<== onImportTypeDef(typesDef={}, importResult={})", typesDef, importRequest.getImportResult());
+ }
+ }
+
+ @Override
+ public Boolean onImportEntity(AtlasEntityWithExtInfo entityWithExtInfo, String importId, int position) throws AtlasBaseException {
+ LOG.info("==> onImportEntity(entityWithExtInfo={}, importId={}, position={})", entityWithExtInfo, importId, position);
+
+ AtlasAsyncImportRequest importRequest = asyncImportService.fetchImportRequestByImportId(importId);
+
+ if (importRequest == null) {
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_NOT_FOUND, importId);
+ }
+
+ AtlasImportResult result = importRequest.getImportResult();
+ float importProgress = importRequest.getImportDetails().getImportProgress();
+ int importedEntitiesCounter = importRequest.getImportDetails().getImportedEntitiesCount();
+ int failedEntitiesCounter = importRequest.getImportDetails().getFailedEntitiesCount();
+ Set<String> processedEntities = new HashSet<>(result.getProcessedEntities());
+ List<String> failedEntities = importRequest.getImportDetails().getFailedEntities();
+ EntityMutationResponse entityMutationResponse = null;
+ long startTimestamp = System.currentTimeMillis();
+
+ try {
+ RequestContext.get().setImportInProgress(true);
+
+ TypesUtil.Pair<EntityMutationResponse, Float> resp = this.bulkImporter.asyncImport(entityWithExtInfo, entityMutationResponse,
+ result, processedEntities, failedEntities, position, importRequest.getImportDetails().getTotalEntitiesCount(), importProgress);
+
+ importedEntitiesCounter += 1;
+
+ importRequest.getImportDetails().setImportedEntitiesCount(importedEntitiesCounter);
+
+ result.setProcessedEntities(new ArrayList<>(processedEntities));
+
+ importRequest.getImportDetails().setImportProgress(resp.right);
+ } catch (AtlasBaseException abe) {
+ failedEntitiesCounter += 1;
+
+ importRequest.getImportDetails().setFailedEntitiesCount(failedEntitiesCounter);
+ failedEntities.add(entityWithExtInfo.getEntity().getGuid());
+ importRequest.getImportDetails().setFailedEntities(failedEntities);
+ importRequest.getImportDetails().addFailure(entityWithExtInfo.getEntity().getGuid(), abe.getMessage());
+ } finally {
+ RequestContext.get().setImportInProgress(false);
+
+ result.incrementMeticsCounter("duration", getDuration(System.currentTimeMillis(), startTimestamp));
+ importRequest.setImportResult(result);
+ importRequest.setCompletedTime(System.currentTimeMillis());
+
+ asyncImportService.updateImportRequest(importRequest);
+
+ LOG.info("<== onImportEntity(entityWithExtInfo={}, importId={}, position={})", entityWithExtInfo, importId, position);
+ }
+
+ if (importRequest.getImportDetails().getPublishedEntityCount() <=
+ importRequest.getImportDetails().getImportedEntitiesCount() + importRequest.getImportDetails().getFailedEntitiesCount()) {
+ onImportComplete(importId);
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public void onImportComplete(String importId) throws AtlasBaseException {
+ LOG.info("==> onImportComplete(importId={})", importId);
+
+ try {
+ AtlasAsyncImportRequest importRequest = asyncImportService.fetchImportRequestByImportId(importId);
+
+ if (importRequest == null) {
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_NOT_FOUND, importId);
+ }
+
+ AtlasAsyncImportTestUtil.intercept(importRequest);
+
+ if (importRequest.getImportDetails().getTotalEntitiesCount() == importRequest.getImportDetails().getImportedEntitiesCount()) {
+ importRequest.setStatus(AtlasAsyncImportRequest.ImportStatus.SUCCESSFUL);
+ importRequest.getImportResult().setOperationStatus(SUCCESS);
+ } else if (importRequest.getImportDetails().getImportedEntitiesCount() > 0) {
+ importRequest.setStatus(AtlasAsyncImportRequest.ImportStatus.PARTIAL_SUCCESS);
+ importRequest.getImportResult().setOperationStatus(AtlasImportResult.OperationStatus.PARTIAL_SUCCESS);
+ } else {
+ importRequest.setStatus(FAILED);
+ }
+
+ asyncImportService.updateImportRequest(importRequest);
+
+ AtlasImportResult result = importRequest.getImportResult();
+
+ processReplicationDeletion(result.getExportResult().getRequest(), result.getRequest());
+
+ auditsWriter.write(result.getUserName(), result, result.getTimeStamp(), System.currentTimeMillis(), importRequest.getImportDetails().getCreationOrder());
+
+ addToImportOperationAudits(result);
+ }
+ finally {
+ LOG.info("<== onImportComplete(importId={})", importId);
+ }
+ }
+
+ @Override
+ public void onCompleteImportRequest(String importId) {
+ LOG.info("==> onCompleteImportRequest(importId={})", importId);
+
+ asyncImportTaskExecutor.onCompleteImportRequest(importId);
+
+ LOG.info("<== onCompleteImportRequest(importId={})", importId);
+ }
+
@VisibleForTesting
void setImportTransform(EntityImportStream source, String transforms) throws AtlasBaseException {
ImportTransforms importTransform = ImportTransforms.fromJson(transforms);
@@ -241,23 +446,27 @@ private void setStartPosition(AtlasImportRequest request, EntityImportStream sou
}
}
- private void processTypes(AtlasTypesDef typeDefinitionMap, AtlasImportResult result) throws AtlasBaseException {
+ @VisibleForTesting
+ void processTypes(AtlasTypesDef typeDefinitionMap, AtlasImportResult result) throws AtlasBaseException {
if (result.getRequest().getUpdateTypeDefs() != null && !result.getRequest().getUpdateTypeDefs().equals("true")) {
return;
}
ImportTypeDefProcessor importTypeDefProcessor = new ImportTypeDefProcessor(this.typeDefStore, this.typeRegistry);
+
importTypeDefProcessor.processTypes(typeDefinitionMap, result);
}
private void processEntities(String userName, EntityImportStream importSource, AtlasImportResult result) throws AtlasBaseException {
result.setExportResult(importSource.getExportResult());
+ long startTimestamp = System.currentTimeMillis();
+
this.bulkImporter.bulkImport(importSource, result);
- endTimestamp = System.currentTimeMillis();
+ long endTimestamp = System.currentTimeMillis();
- result.incrementMeticsCounter("duration", getDuration(this.endTimestamp, this.startTimestamp));
+ result.incrementMeticsCounter("duration", getDuration(endTimestamp, startTimestamp));
result.setOperationStatus(AtlasImportResult.OperationStatus.SUCCESS);
if (isMigrationMode(result.getRequest())) {
@@ -267,7 +476,20 @@ private void processEntities(String userName, EntityImportStream importSource, A
auditsWriter.write(userName, result, startTimestamp, endTimestamp, importSource.getCreationOrder());
}
- private void processReplicationDeletion(AtlasExportRequest exportRequest, AtlasImportRequest importRequest) throws AtlasBaseException {
+ private EntityImportStream createZipSource(InputStream inputStream, String configuredTemporaryDirectory) throws AtlasBaseException {
+ try {
+ if (StringUtils.isEmpty(configuredTemporaryDirectory)) {
+ return new ZipSource(inputStream);
+ } else {
+ return new ZipSourceWithBackingDirectory(inputStream, configuredTemporaryDirectory);
+ }
+ } catch (IOException ex) {
+ throw new AtlasBaseException(ex);
+ }
+ }
+
+ @VisibleForTesting
+ void processReplicationDeletion(AtlasExportRequest exportRequest, AtlasImportRequest importRequest) throws AtlasBaseException {
if (checkHiveTableIncrementalSkipLineage(importRequest, exportRequest)) {
tableReplicationRequestProcessor.process(exportRequest, importRequest);
}
@@ -283,13 +505,11 @@ private EntityImportStream createZipSource(AtlasImportRequest request, InputStre
LOG.info("ZipSource Format: ZipDirect: Size: {}", AtlasStringUtil.getOption(request.getOptions(), "size"));
return getZipDirectEntityImportStream(request, inputStream);
- }
-
- if (StringUtils.isEmpty(configuredTemporaryDirectory)) {
+ } else if (StringUtils.isEmpty(configuredTemporaryDirectory)) {
return new ZipSource(inputStream);
+ } else {
+ return new ZipSourceWithBackingDirectory(inputStream, configuredTemporaryDirectory);
}
-
- return new ZipSourceWithBackingDirectory(inputStream, configuredTemporaryDirectory);
} catch (IOException ex) {
throw new AtlasBaseException(ex);
}
@@ -306,4 +526,21 @@ private EntityImportStream getZipDirectEntityImportStream(AtlasImportRequest req
private boolean isMigrationMode(AtlasImportRequest request) {
return AtlasStringUtil.hasOption(request.getOptions(), AtlasImportRequest.OPTION_KEY_MIGRATION);
}
+
+ @VisibleForTesting
+ void addToImportOperationAudits(AtlasImportResult result) throws AtlasBaseException {
+ String params = AtlasJson.toJson(Collections.singletonMap(OPERATION_STATUS, result.getOperationStatus().name()));
+
+ if (result.getExportResult().getRequest() == null) {
+ int resultCount = result.getProcessedEntities().size();
+
+ auditService.add(AtlasAuditEntry.AuditOperation.IMPORT, params, AtlasJson.toJson(result.getMetrics()), resultCount);
+ } else {
+ List objectIds = result.getExportResult().getRequest().getItemsToExport();
+ Map entityCountByType = objectIds.stream().collect(Collectors.groupingBy(AtlasObjectId::getTypeName, Collectors.counting()));
+ int resultCount = objectIds.size();
+
+ auditService.add(AtlasAuditEntry.AuditOperation.IMPORT, params, AtlasJson.toJson(entityCountByType), resultCount);
+ }
+ }
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSource.java b/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSource.java
index 464bd2de59..707632a89f 100644
--- a/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSource.java
+++ b/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSource.java
@@ -33,6 +33,8 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -53,6 +55,7 @@ public class ZipSource implements EntityImportStream {
private ImportTransforms importTransform;
private List entityHandlers;
private int currentPosition;
+ private String md5Hash;
public ZipSource(InputStream inputStream) throws IOException, AtlasBaseException {
this(inputStream, null);
@@ -245,30 +248,43 @@ private void setCreationOrder() {
}
private void updateGuidZipEntryMap() throws IOException {
- ZipInputStream zipInputStream = new ZipInputStream(inputStream);
- ZipEntry zipEntry = zipInputStream.getNextEntry();
+ try (ZipInputStream zipInputStream = new ZipInputStream(inputStream)) {
+ MessageDigest md5Digest = MessageDigest.getInstance("MD5");
+ ZipEntry zipEntry = zipInputStream.getNextEntry();
- while (zipEntry != null) {
- String entryName = zipEntry.getName().replace(".json", "");
+ while (zipEntry != null) {
+ String entryName = zipEntry.getName().replace(".json", "");
- if (guidEntityJsonMap.containsKey(entryName)) {
- continue;
- }
+ if (guidEntityJsonMap.containsKey(entryName)) {
+ continue;
+ }
+
+ byte[] buf = new byte[1024];
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ int n;
+
+ while ((n = zipInputStream.read(buf, 0, 1024)) > -1) {
+ md5Digest.update(buf, 0, n);
+ bos.write(buf, 0, n);
+ }
- byte[] buf = new byte[1024];
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- int n;
+ guidEntityJsonMap.put(entryName, bos.toString());
- while ((n = zipInputStream.read(buf, 0, 1024)) > -1) {
- bos.write(buf, 0, n);
+ zipEntry = zipInputStream.getNextEntry();
}
- guidEntityJsonMap.put(entryName, bos.toString());
+ // Compute the final MD5 hash after processing the entire ZIP file
+ byte[] hashBytes = md5Digest.digest();
+ StringBuilder md5Hash = new StringBuilder();
- zipEntry = zipInputStream.getNextEntry();
- }
+ for (byte b : hashBytes) {
+ md5Hash.append(String.format("%02x", b));
+ }
- zipInputStream.close();
+ this.md5Hash = md5Hash.toString();
+ } catch (NoSuchAlgorithmException e) {
+ throw new IOException(e);
+ }
}
private void applyTransformers(AtlasEntityWithExtInfo entityWithExtInfo) {
@@ -328,4 +344,9 @@ private AtlasEntity getEntity(String guid) throws AtlasBaseException {
return null;
}
+
+ @Override
+ public String getMd5Hash() {
+ return this.md5Hash;
+ }
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceDirect.java b/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceDirect.java
index bd839d8b73..424490f381 100644
--- a/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceDirect.java
+++ b/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceDirect.java
@@ -201,6 +201,11 @@ public void close() {
}
}
+ @Override
+ public String getMd5Hash() {
+ return null;
+ }
+
private void applyTransformers(AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo) {
if (entityWithExtInfo == null) {
return;
diff --git a/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceWithBackingDirectory.java b/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceWithBackingDirectory.java
index 519bf4a30e..bdf56612bf 100644
--- a/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceWithBackingDirectory.java
+++ b/repository/src/main/java/org/apache/atlas/repository/impexp/ZipSourceWithBackingDirectory.java
@@ -38,6 +38,8 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
@@ -62,6 +64,7 @@ public class ZipSourceWithBackingDirectory implements EntityImportStream {
private ArrayList creationOrder = new ArrayList<>();
private int currentPosition;
private int numberOfEntries;
+ private String md5Hash;
public ZipSourceWithBackingDirectory(InputStream inputStream) throws IOException, AtlasBaseException {
this(inputStream, null);
@@ -291,19 +294,32 @@ private void unzipToTempDirectory(InputStream inputStream) throws IOException {
ZipInputStream zis = new ZipInputStream(inputStream);
try {
- ZipEntry zipEntry = zis.getNextEntry();
+ MessageDigest md5Digest = MessageDigest.getInstance("MD5");
+ ZipEntry zipEntry = zis.getNextEntry();
while (zipEntry != null) {
String entryName = zipEntry.getName();
- writeJsonToFile(entryName, getJsonPayloadFromZipEntryStream(zis));
+ writeJsonToFile(entryName, getJsonPayloadFromZipEntryStream(zis, md5Digest));
numberOfEntries++;
zipEntry = zis.getNextEntry();
}
+ // Compute the final MD5 hash after processing the entire ZIP file
+ byte[] hashBytes = md5Digest.digest();
+ StringBuilder md5Hash = new StringBuilder();
+
+ for (byte b : hashBytes) {
+ md5Hash.append(String.format("%02x", b));
+ }
+
+ this.md5Hash = md5Hash.toString();
+
numberOfEntries -= ZipExportFileNames.values().length;
+ } catch (NoSuchAlgorithmException e) {
+ throw new IOException(e);
} finally {
zis.close();
inputStream.close();
@@ -354,13 +370,14 @@ private void setupIterator() {
reset();
}
- private byte[] getJsonPayloadFromZipEntryStream(ZipInputStream zipInputStream) {
+ private byte[] getJsonPayloadFromZipEntryStream(ZipInputStream zipInputStream, MessageDigest md5Digest) {
try {
byte[] buf = new byte[1024];
ByteArrayOutputStream bos = new ByteArrayOutputStream();
int n;
while ((n = zipInputStream.read(buf, 0, 1024)) > -1) {
+ md5Digest.update(buf, 0, n);
bos.write(buf, 0, n);
}
@@ -425,4 +442,9 @@ private String moveNext() {
return null;
}
+
+ @Override
+ public String getMd5Hash() {
+ return this.md5Hash;
+ }
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/ogm/impexp/AtlasAsyncImportRequestDTO.java b/repository/src/main/java/org/apache/atlas/repository/ogm/impexp/AtlasAsyncImportRequestDTO.java
new file mode 100644
index 0000000000..8abf724b4b
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/ogm/impexp/AtlasAsyncImportRequestDTO.java
@@ -0,0 +1,251 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.atlas.repository.ogm.impexp;
+
+import org.apache.atlas.AtlasConfiguration;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.repository.impexp.AuditsWriter;
+import org.apache.atlas.repository.ogm.AbstractDataTransferObject;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import javax.inject.Inject;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportDetails;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus;
+
+/**
+ * AtlasAsyncImportRequestDTO is the bridge class between AtlasAsyncImportRequest and AtlasEntity.
+ */
+@Component
+public class AtlasAsyncImportRequestDTO extends AbstractDataTransferObject {
+ private static final Logger LOG = LoggerFactory.getLogger(AtlasAsyncImportRequestDTO.class);
+
+ public static final String ASYNC_IMPORT_TYPE_NAME = "__AtlasAsyncImportRequest";
+ public static final String IMPORT_RESULT_PROPERTY = "importResult";
+ public static final String REQUEST_ID_PROPERTY = "requestId";
+ public static final String IMPORT_ID_PROPERTY = "importId";
+ public static final String START_ENTITY_POSITION_PROPERTY = "startEntityPosition";
+ public static final String STATUS_PROPERTY = "status";
+ public static final String IMPORT_DETAILS_PROPERTY = "importDetails";
+ public static final String RECEIVED_TIME_PROPERTY = "receivedTime";
+ public static final String STAGED_TIME_PROPERTY = "stagedTime";
+ public static final String PROCESSING_START_TIME = "processingStartTime";
+ public static final String COMPLETED_TIME = "completedTime";
+
+ @Inject
+ public AtlasAsyncImportRequestDTO(AtlasTypeRegistry typeRegistry) {
+ super(typeRegistry, AtlasAsyncImportRequest.class, ASYNC_IMPORT_TYPE_NAME);
+ }
+
+ @Override
+ public AtlasAsyncImportRequest from(AtlasEntity entity) {
+ LOG.debug("==> AtlasAsyncImportRequestDTO.from({})", entity);
+
+ AtlasAsyncImportRequest asyncImportRequest = null;
+ String jsonImportResult = (String) entity.getAttribute(IMPORT_RESULT_PROPERTY);
+
+ if (StringUtils.isEmpty(jsonImportResult)) {
+ LOG.error("AtlasAsyncImportRequest.from(entity={}): empty/null value in attribute {}", entity, IMPORT_RESULT_PROPERTY);
+ } else {
+ String requestId = (String) entity.getAttribute(REQUEST_ID_PROPERTY);
+ String importId = (String) entity.getAttribute(IMPORT_ID_PROPERTY);
+ String status = (String) entity.getAttribute(STATUS_PROPERTY);
+ int startEntityPosition = Integer.parseInt((String) entity.getAttribute(START_ENTITY_POSITION_PROPERTY));
+ String jsonImportDetails = (String) entity.getAttribute(IMPORT_DETAILS_PROPERTY);
+ long receivedTime = objectToLong(entity.getAttribute(RECEIVED_TIME_PROPERTY));
+ long stagedTime = objectToLong(entity.getAttribute(STAGED_TIME_PROPERTY));
+ long processingStartTime = objectToLong(entity.getAttribute(PROCESSING_START_TIME));
+ long completedTime = objectToLong(entity.getAttribute(COMPLETED_TIME));
+
+ asyncImportRequest = new AtlasAsyncImportRequest(AtlasType.fromJson(jsonImportResult, AtlasImportResult.class));
+
+ asyncImportRequest.setGuid(entity.getGuid());
+ asyncImportRequest.getImportTrackingInfo().setRequestId(requestId);
+ asyncImportRequest.setImportId(importId);
+ asyncImportRequest.getImportTrackingInfo().setStartEntityPosition(startEntityPosition);
+ asyncImportRequest.setStatus(ImportStatus.valueOf(status));
+ asyncImportRequest.setImportDetails(StringUtils.isNotEmpty(jsonImportDetails) ? AtlasType.fromJson(jsonImportDetails, ImportDetails.class) : null);
+ asyncImportRequest.setReceivedTime(receivedTime);
+ asyncImportRequest.setStagedTime(stagedTime);
+ asyncImportRequest.setProcessingStartTime(processingStartTime);
+ asyncImportRequest.setCompletedTime(completedTime);
+ }
+
+ LOG.debug("<== AtlasAsyncImportRequestDTO.from(entity={}): ret={}", entity, asyncImportRequest);
+
+ return asyncImportRequest;
+ }
+
+ @Override
+ public AtlasAsyncImportRequest from(AtlasEntityWithExtInfo entityWithExtInfo) {
+ LOG.debug("==> AtlasAsyncImportRequestDTO.from(entity={})", entityWithExtInfo);
+
+ AtlasAsyncImportRequest request = from(entityWithExtInfo.getEntity());
+
+ LOG.debug("<== AtlasAsyncImportRequestDTO.from(entity={}): ret={}", entityWithExtInfo, request);
+
+ return request;
+ }
+
+ @Override
+ public AtlasEntity toEntity(AtlasAsyncImportRequest obj) throws AtlasBaseException {
+ LOG.debug("==> AtlasAsyncImportRequestDTO.toEntity(obj={})", obj);
+
+ AtlasEntity entity = getDefaultAtlasEntity(obj);
+
+ entity.setAttribute(REQUEST_ID_PROPERTY, getUniqueValue(obj));
+
+ if (obj.getImportResult() != null) {
+ entity.setAttribute(IMPORT_RESULT_PROPERTY, AtlasType.toJson(obj.getImportResult()));
+ }
+
+ entity.setAttribute(IMPORT_ID_PROPERTY, obj.getImportId());
+ entity.setAttribute(STATUS_PROPERTY, obj.getStatus());
+ entity.setAttribute(IMPORT_DETAILS_PROPERTY, AtlasType.toJson(obj.getImportDetails()));
+ entity.setAttribute(START_ENTITY_POSITION_PROPERTY, String.valueOf(obj.getImportTrackingInfo().getStartEntityPosition()));
+ entity.setAttribute(RECEIVED_TIME_PROPERTY, String.valueOf(obj.getReceivedTime()));
+ entity.setAttribute(STAGED_TIME_PROPERTY, String.valueOf(obj.getStagedTime()));
+ entity.setAttribute(PROCESSING_START_TIME, String.valueOf(obj.getProcessingStartTime()));
+ entity.setAttribute(COMPLETED_TIME, String.valueOf(obj.getCompletedTime()));
+
+ LOG.debug("<== AtlasAsyncImportRequestDTO.toEntity(obj={}): ret={}", obj, entity);
+
+ return entity;
+ }
+
+ @Override
+ public AtlasEntityWithExtInfo toEntityWithExtInfo(AtlasAsyncImportRequest obj) throws AtlasBaseException {
+ LOG.debug("==> AtlasAsyncImportRequestDTO.toEntityWithExtInfo(obj={})", obj);
+
+ AtlasEntityWithExtInfo ret = new AtlasEntityWithExtInfo(toEntity(obj));
+
+ LOG.debug("<== AtlasAsyncImportRequestDTO.toEntityWithExtInfo(obj={}): ret={}", obj, ret);
+
+ return ret;
+ }
+
+ @Override
+ public Map getUniqueAttributes(AtlasAsyncImportRequest obj) {
+ Map ret = new HashMap<>();
+
+ if (obj.getImportId() != null) {
+ ret.put(REQUEST_ID_PROPERTY, getUniqueValue(obj));
+ }
+
+ return ret;
+ }
+
+ private long objectToLong(Object object) {
+ return Optional.ofNullable(object)
+ .map(Object::toString)
+ .map(Long::parseLong)
+ .orElse(0L);
+ }
+
+ public static String convertToValidJson(String mapString) {
+ String trimmed = mapString.trim();
+
+ if (trimmed.startsWith("{")) {
+ trimmed = trimmed.substring(1);
+ }
+
+ if (trimmed.endsWith("}")) {
+ trimmed = trimmed.substring(0, trimmed.length() - 1);
+ }
+
+ String[] keyValuePairs = trimmed.split(",\\s*(?![^\\[\\]]*\\])");
+ StringBuilder jsonBuilder = new StringBuilder();
+
+ jsonBuilder.append("{");
+
+ for (int i = 0; i < keyValuePairs.length; i++) {
+ String[] keyValue = keyValuePairs[i].split("=", 2);
+ String key = keyValue[0].trim();
+ String value = keyValue[1].trim();
+
+ jsonBuilder.append("\"").append(key).append("\":");
+
+ if (value.startsWith("[") && value.endsWith("]")) {
+ String arrayContent = value.substring(1, value.length() - 1).trim();
+
+ if (arrayContent.isEmpty()) {
+ jsonBuilder.append("[]");
+ } else {
+ String[] arrayElements = arrayContent.split(",\\s*");
+
+ jsonBuilder.append("[");
+
+ for (int j = 0; j < arrayElements.length; j++) {
+ String element = arrayElements[j].trim();
+
+ if (isNumeric(element)) {
+ jsonBuilder.append(element);
+ } else {
+ jsonBuilder.append("\"").append(element).append("\"");
+ }
+
+ if (j < arrayElements.length - 1) {
+ jsonBuilder.append(",");
+ }
+ }
+
+ jsonBuilder.append("]");
+ }
+ } else if (isNumeric(value)) {
+ jsonBuilder.append(value);
+ } else {
+ jsonBuilder.append("\"").append(value).append("\"");
+ }
+
+ if (i < keyValuePairs.length - 1) {
+ jsonBuilder.append(",");
+ }
+ }
+
+ jsonBuilder.append("}");
+
+ return jsonBuilder.toString();
+ }
+
+ private static boolean isNumeric(String value) {
+ try {
+ Double.parseDouble(value);
+ return true;
+ } catch (NumberFormatException e) {
+ return false;
+ }
+ }
+
+ private String getUniqueValue(AtlasAsyncImportRequest obj) {
+ return AtlasConfiguration.ASYNC_IMPORT_REQUEST_ID_PREFIX.getString() + obj.getImportId() + "@" + AuditsWriter.getCurrentClusterName();
+ }
+}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/BulkImporter.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/BulkImporter.java
index bf304ddd3d..d9f39c6b17 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/BulkImporter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/BulkImporter.java
@@ -19,8 +19,13 @@
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.repository.store.graph.v2.EntityImportStream;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
+
+import java.util.List;
+import java.util.Set;
public interface BulkImporter {
/**
@@ -30,4 +35,6 @@ public interface BulkImporter {
* @throws AtlasBaseException
*/
EntityMutationResponse bulkImport(EntityImportStream entityStream, AtlasImportResult importResult) throws AtlasBaseException;
+
+ TypesUtil.Pair asyncImport(AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo, EntityMutationResponse entityMutationResponse, AtlasImportResult importResult, Set processedGuids, List failedGuids, int entityPosition, int totalEntities, float importProgress) throws AtlasBaseException;
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AsyncImportTaskExecutor.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AsyncImportTaskExecutor.java
new file mode 100644
index 0000000000..a6f781cfbc
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AsyncImportTaskExecutor.java
@@ -0,0 +1,259 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.store.graph.v2;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.kafka.NotificationProvider;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.model.notification.ImportNotification;
+import org.apache.atlas.model.notification.MessageSource;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.notification.NotificationException;
+import org.apache.atlas.notification.NotificationInterface;
+import org.apache.atlas.repository.impexp.AsyncImportService;
+import org.apache.atlas.repository.store.graph.v2.asyncimport.ImportTaskListener;
+import org.apache.commons.lang.ObjectUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import javax.inject.Inject;
+
+import java.util.Collections;
+import java.util.List;
+
+import static org.apache.atlas.notification.NotificationInterface.NotificationType.ASYNC_IMPORT;
+
+@Component
+public class AsyncImportTaskExecutor {
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncImportTaskExecutor.class);
+
+ private static final String MESSAGE_SOURCE = AsyncImportTaskExecutor.class.getSimpleName();
+
+ private final AsyncImportService importService;
+ private final NotificationInterface notificationInterface;
+ private final ImportTaskListener importTaskListener;
+ private final MessageSource messageSource;
+
+ @Inject
+ public AsyncImportTaskExecutor(AsyncImportService importService, ImportTaskListener importTaskListener) {
+ this.importService = importService;
+ this.notificationInterface = NotificationProvider.get();
+ this.importTaskListener = importTaskListener;
+ this.messageSource = new MessageSource(MESSAGE_SOURCE);
+ }
+
+ public AtlasAsyncImportRequest run(AtlasImportResult result, EntityImportStream entityImportStream) throws AtlasBaseException {
+ try {
+ String importId = entityImportStream.getMd5Hash();
+ AtlasAsyncImportRequest importRequest = registerRequest(result, importId, entityImportStream.size(), entityImportStream.getCreationOrder());
+
+ if (ObjectUtils.equals(importRequest.getStatus(), ImportStatus.WAITING) || ObjectUtils.equals(importRequest.getStatus(), ImportStatus.PROCESSING)) {
+ LOG.warn("AsyncImportTaskExecutor.run(): Import request with id={} is already in state={}", importId, importRequest.getStatus());
+ } else {
+ // skip to the most recent published position
+ if (ObjectUtils.equals(importRequest.getStatus(), ImportStatus.STAGING)) {
+ skipToStartEntityPosition(importRequest, entityImportStream);
+ }
+
+ publishImportRequest(importRequest, entityImportStream);
+ }
+
+ return importRequest;
+ } catch (AtlasBaseException abe) {
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_FAILED, abe);
+ } finally {
+ entityImportStream.close();
+ }
+ }
+
+ public void publishTypeDefNotification(AtlasAsyncImportRequest importRequest, AtlasTypesDef atlasTypesDef) throws AtlasBaseException {
+ LOG.info("==> publishTypeDefNotification(importRequest={}, atlasTypesDef={})", importRequest, atlasTypesDef);
+
+ try {
+ HookNotification typeDefImportNotification = new ImportNotification.AtlasTypesDefImportNotification(importRequest.getImportId(), importRequest.getImportResult().getUserName(), atlasTypesDef);
+
+ sendToTopic(importRequest.getTopicName(), typeDefImportNotification);
+ } finally {
+ LOG.info("<== publishTypeDefNotification(atlasAsyncImportRequest={})", importRequest);
+ }
+ }
+
+ public void onCompleteImportRequest(String importId) {
+ importTaskListener.onCompleteImportRequest(importId);
+ }
+
+ public void abortAsyncImportRequest(String importId) throws AtlasBaseException {
+ LOG.info("==> abortAsyncImportRequest(importId={})", importId);
+
+ try {
+ AtlasAsyncImportRequest importRequest = importService.abortImport(importId);
+
+ notificationInterface.deleteTopic(ASYNC_IMPORT, importRequest.getTopicName());
+ } catch (AtlasBaseException abe) {
+ throw new AtlasBaseException(AtlasErrorCode.ABORT_IMPORT_FAILED, abe, importId);
+ } finally {
+ LOG.info("<== abortAsyncImportRequest(importId={})", importId);
+ }
+ }
+
+ public void delete() {
+ LOG.info("==> delete()");
+
+ importService.deleteRequests();
+
+ LOG.info("<== delete()");
+ }
+
+ @VisibleForTesting
+ void publishImportRequest(AtlasAsyncImportRequest importRequest, EntityImportStream entityImportStream) throws AtlasBaseException {
+ try {
+ LOG.info("==> publishImportRequest(atlasAsyncImportRequest={})", importRequest);
+
+ publishTypeDefNotification(importRequest, entityImportStream.getTypesDef());
+ publishEntityNotification(importRequest, entityImportStream);
+
+ importRequest.setStagedTime(System.currentTimeMillis());
+
+ importService.updateImportRequest(importRequest);
+
+ importTaskListener.onReceiveImportRequest(importRequest);
+ } finally {
+ notificationInterface.closeProducer(ASYNC_IMPORT, importRequest.getTopicName());
+
+ LOG.info("<== publishImportRequest(atlasAsyncImportRequest={})", importRequest);
+ }
+ }
+
+ @VisibleForTesting
+ void publishEntityNotification(AtlasAsyncImportRequest importRequest, EntityImportStream entityImportStream) {
+ LOG.info("==> publishEntityNotification(atlasAsyncImportRequest={})", importRequest);
+
+ int publishedEntityCounter = importRequest.getImportDetails().getPublishedEntityCount();
+ int failedEntityCounter = importRequest.getImportDetails().getFailedEntitiesCount();
+
+ while (entityImportStream.hasNext()) {
+ AtlasEntityWithExtInfo entityWithExtInfo = entityImportStream.getNextEntityWithExtInfo();
+ AtlasEntity entity = entityWithExtInfo != null ? entityWithExtInfo.getEntity() : null;
+ int startEntityPosition = entityImportStream.getPosition();
+
+ try {
+ if (entity == null) {
+ continue;
+ }
+
+ HookNotification entityImportNotification = new ImportNotification.AtlasEntityImportNotification(importRequest.getImportId(), importRequest.getImportResult().getUserName(), entityWithExtInfo, entityImportStream.getPosition());
+
+ sendToTopic(importRequest.getTopicName(), entityImportNotification);
+
+ entityImportStream.onImportComplete(entity.getGuid());
+
+ publishedEntityCounter += 1;
+ } catch (AtlasBaseException abe) {
+ failedEntityCounter += 1;
+
+ LOG.warn("AsyncImport(id={}): failed to publish entity guid={}", importRequest.getImportId(), entity.getGuid(), abe);
+
+ importRequest.getImportDetails().getFailedEntities().add(entity.getGuid());
+ importRequest.getImportDetails().setFailedEntitiesCount(failedEntityCounter);
+ importRequest.getImportDetails().getFailures().put(entity.getGuid(), abe.getMessage());
+ } finally {
+ importRequest.getImportTrackingInfo().setStartEntityPosition(startEntityPosition);
+ importRequest.getImportDetails().setPublishedEntityCount(publishedEntityCounter);
+
+ importService.updateImportRequest(importRequest);
+
+ LOG.info("<== publishEntityNotification(atlasAsyncImportRequest={})", importRequest);
+ }
+ }
+ }
+
+ @VisibleForTesting
+ void skipToStartEntityPosition(AtlasAsyncImportRequest importRequest, EntityImportStream entityImportStream) {
+ int startEntityPosition = importRequest.getImportTrackingInfo().getStartEntityPosition();
+
+ LOG.info("==> skipToStartEntityPosition(atlasAsyncImportRequest={}): position={}", importRequest, startEntityPosition);
+
+ while (entityImportStream.hasNext() && startEntityPosition > entityImportStream.getPosition()) {
+ entityImportStream.next();
+ }
+
+ LOG.info("<== skipToStartEntityPosition(atlasAsyncImportRequest={}): position={}", importRequest, startEntityPosition);
+ }
+
+ @VisibleForTesting
+ AtlasAsyncImportRequest registerRequest(AtlasImportResult result, String importId, int totalEntities, List creationOrder) throws AtlasBaseException {
+ LOG.info("==> registerRequest(importId={})", importId);
+
+ try {
+ AtlasAsyncImportRequest existingImportRequest = importService.fetchImportRequestByImportId(importId);
+
+                // handle new, successful, and failed requests from scratch
+ if (existingImportRequest == null
+ || ObjectUtils.equals(existingImportRequest.getStatus(), ImportStatus.SUCCESSFUL)
+ || ObjectUtils.equals(existingImportRequest.getStatus(), ImportStatus.PARTIAL_SUCCESS)
+ || ObjectUtils.equals(existingImportRequest.getStatus(), ImportStatus.FAILED)
+ || ObjectUtils.equals(existingImportRequest.getStatus(), ImportStatus.ABORTED)) {
+ AtlasAsyncImportRequest newImportRequest = new AtlasAsyncImportRequest(result);
+
+ newImportRequest.setImportId(importId);
+ newImportRequest.setReceivedTime(System.currentTimeMillis());
+ newImportRequest.getImportDetails().setTotalEntitiesCount(totalEntities);
+ newImportRequest.getImportDetails().setCreationOrder(creationOrder);
+
+ importService.saveImportRequest(newImportRequest);
+
+ LOG.info("registerRequest(importId={}): registered new request {}", importId, newImportRequest);
+
+ return newImportRequest;
+ } else if (ObjectUtils.equals(existingImportRequest.getStatus(), ImportStatus.STAGING)) {
+                // if we are resuming staging, refresh the time at which the latest request was received
+ existingImportRequest.setReceivedTime(System.currentTimeMillis());
+
+ importService.updateImportRequest(existingImportRequest);
+ }
+
+            // treat a request in STAGING / WAITING / PROCESSING status as a resume of the existing import
+ LOG.info("registerRequest(importId={}): not a new request, resuming {}", importId, existingImportRequest);
+
+ return existingImportRequest;
+ } catch (AtlasBaseException abe) {
+ LOG.error("Failed to register import request id={}", importId, abe);
+
+ throw new AtlasBaseException(AtlasErrorCode.IMPORT_REGISTRATION_FAILED, abe);
+ } finally {
+ LOG.info("<== registerRequest(importId={})", importId);
+ }
+ }
+
+ private void sendToTopic(String topic, HookNotification notification) throws AtlasBaseException {
+ try {
+ notificationInterface.send(topic, Collections.singletonList(notification), messageSource);
+ } catch (NotificationException exp) {
+ throw new AtlasBaseException(exp);
+ }
+ }
+}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStreamForImport.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStreamForImport.java
index 63cfbdf8db..68885b1e4a 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStreamForImport.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasEntityStreamForImport.java
@@ -118,4 +118,9 @@ public List getCreationOrder() {
@Override
public void close() {
}
+
+ @Override
+ public String getMd5Hash() {
+ return null;
+ }
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java
index 2aed953af7..4efc062478 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/AtlasGraphUtilsV2.java
@@ -67,6 +67,7 @@
import static org.apache.atlas.repository.Constants.INDEX_SEARCH_VERTEX_PREFIX_DEFAULT;
import static org.apache.atlas.repository.Constants.INDEX_SEARCH_VERTEX_PREFIX_PROPERTY;
import static org.apache.atlas.repository.Constants.PROPAGATED_CLASSIFICATION_NAMES_KEY;
+import static org.apache.atlas.repository.Constants.PROPERTY_KEY_RECEIVED_TIME;
import static org.apache.atlas.repository.Constants.RELATIONSHIP_TYPE_PROPERTY_KEY;
import static org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY;
import static org.apache.atlas.repository.Constants.SUPER_TYPES_PROPERTY_KEY;
@@ -547,6 +548,41 @@ public static List findEntityGUIDsByType(AtlasGraph graph, String typena
return findEntityGUIDsByType(graph, typename, null);
}
+ public static List findEntityPropertyValuesByTypeAndAttributes(String typeName, Map attributeValues, String propertyKey) {
+ return findEntityPropertyValuesByTypeAndAttributes(getGraphInstance(), typeName, attributeValues, propertyKey);
+ }
+
+ public static List findEntityPropertyValuesByTypeAndAttributes(AtlasGraph graph, String typeName, Map attributeValues, String propertyKey) {
+ MetricRecorder metric = RequestContext.get().startMetricRecord("findEntityPropertyValuesByTypeAndAttributes");
+ AtlasGraphQuery query = graph.query().has(ENTITY_TYPE_PROPERTY_KEY, typeName);
+
+ for (Map.Entry entry : attributeValues.entrySet()) {
+ String attrName = entry.getKey();
+ Object attrValue = entry.getValue();
+
+ if (attrName != null && attrValue != null) {
+ query.has(attrName, attrValue);
+ }
+ }
+
+ query.orderBy(PROPERTY_KEY_RECEIVED_TIME, ASC);
+
+ List propertyValues = new ArrayList<>();
+
+ for (Iterator> results = query.vertices().iterator(); results.hasNext(); ) {
+ AtlasVertex, ?> vertex = results.next();
+ String propertyValue = AtlasGraphUtilsV2.getProperty(vertex, propertyKey, String.class);
+
+ if (propertyValue != null) {
+ propertyValues.add(propertyValue);
+ }
+ }
+
+ RequestContext.get().endMetricRecord(metric);
+
+ return propertyValues;
+ }
+
public static Iterator findActiveEntityVerticesByType(AtlasGraph graph, String typename) {
AtlasGraphQuery query = graph.query().has(ENTITY_TYPE_PROPERTY_KEY, typename).has(STATE_PROPERTY_KEY, Status.ACTIVE.name());
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/BulkImporterImpl.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/BulkImporterImpl.java
index e9e3117a2c..5144588db3 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/BulkImporterImpl.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/BulkImporterImpl.java
@@ -37,6 +37,7 @@
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.type.Constants;
import org.apache.atlas.utils.AtlasStringUtil;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -164,4 +165,14 @@ public EntityMutationResponse bulkImport(EntityImportStream entityStream, AtlasI
return importStrategy.run(entityStream, importResult);
}
+
+ @Override
+ public TypesUtil.Pair asyncImport(AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo, EntityMutationResponse entityMutationResponse, AtlasImportResult importResult, Set processedGuids,
+ List failedGuids, int entityPosition, int totalEntities, float importProgress) throws AtlasBaseException {
+ ImportStrategy importStrategy = new RegularImport(this.atlasGraph, this.entityStore, this.typeRegistry);
+
+ LOG.info("BulkImportImpl.asyncImport(): {}", importStrategy.getClass().getSimpleName());
+
+ return importStrategy.run(entityWithExtInfo, entityMutationResponse, importResult, processedGuids, entityPosition, totalEntities, importProgress, failedGuids);
+ }
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityImportStream.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityImportStream.java
index 772c0e6f87..f1bb83c79e 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityImportStream.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/EntityImportStream.java
@@ -57,4 +57,6 @@ public interface EntityImportStream extends EntityStream {
List getCreationOrder();
void close();
+
+ String getMd5Hash();
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/asyncimport/ImportTaskListener.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/asyncimport/ImportTaskListener.java
new file mode 100644
index 0000000000..7b3875b27b
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/asyncimport/ImportTaskListener.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.store.graph.v2.asyncimport;
+
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+
+public interface ImportTaskListener {
+ void onReceiveImportRequest(AtlasAsyncImportRequest importRequest) throws AtlasBaseException;
+
+ void onCompleteImportRequest(String importId);
+}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/ImportStrategy.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/ImportStrategy.java
index 10ae01b022..512dcbef23 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/ImportStrategy.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/ImportStrategy.java
@@ -20,9 +20,23 @@
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.repository.store.graph.v2.EntityImportStream;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
+
+import java.util.List;
+import java.util.Set;
public abstract class ImportStrategy {
public abstract EntityMutationResponse run(EntityImportStream entityStream, AtlasImportResult importResult) throws AtlasBaseException;
+
+ public abstract TypesUtil.Pair run(AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo,
+ EntityMutationResponse ret,
+ AtlasImportResult importResult,
+ Set processedGuids,
+ int entityStreamPosition,
+ int streamSize,
+ float currentPercent,
+ List residualList) throws AtlasBaseException;
}
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java
index e1de025dc0..0fb6c82ad0 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/MigrationImport.java
@@ -22,6 +22,7 @@
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.impexp.AtlasImportRequest;
import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.repository.converters.AtlasFormatConverters;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
@@ -40,9 +41,14 @@
import org.apache.atlas.repository.store.graph.v2.bulkimport.pc.EntityCreationManager;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.utils.AtlasStringUtil;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
+import org.apache.commons.lang.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.List;
+import java.util.Set;
+
public class MigrationImport extends ImportStrategy {
private static final Logger LOG = LoggerFactory.getLogger(MigrationImport.class);
@@ -92,6 +98,13 @@ public EntityMutationResponse run(EntityImportStream entityStream, AtlasImportRe
return ret;
}
+ @Override
+ public TypesUtil.Pair run(AtlasEntityWithExtInfo entityWithExtInfo, EntityMutationResponse ret,
+ AtlasImportResult importResult, Set processedGuids, int entityStreamPosition,
+ int streamSize, float currentPercent, List residualList) throws AtlasBaseException {
+ throw new NotImplementedException("MigrationImport: entity import not implemented");
+ }
+
private DataMigrationStatusService createMigrationStatusService(AtlasImportResult importResult) {
DataMigrationStatusService dataMigrationStatusService = new DataMigrationStatusService();
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/RegularImport.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/RegularImport.java
index 280800df80..c285b69bf5 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/RegularImport.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v2/bulkimport/RegularImport.java
@@ -41,8 +41,10 @@
import org.apache.atlas.repository.store.graph.v2.EntityImportStream;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -59,6 +61,7 @@ public class RegularImport extends ImportStrategy {
private static final Logger LOG = LoggerFactory.getLogger(RegularImport.class);
private static final int MAX_ATTEMPTS = 3;
+ private static final String EXCEPTION_CLASS_NAME_PERMANENT_LOCKING_EXCEPTION = "PermanentLockingException";
private final AtlasGraph graph;
private final AtlasEntityStore entityStore;
@@ -95,24 +98,52 @@ public EntityMutationResponse run(EntityImportStream entityStream, AtlasImportRe
throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "no entities to create/update.");
}
- EntityMutationResponse ret = new EntityMutationResponse();
-
- ret.setGuidAssignments(new HashMap<>());
-
Set processedGuids = new HashSet<>();
float currentPercent = 0f;
List residualList = new ArrayList<>();
+ EntityMutationResponse ret = null;
EntityImportStreamWithResidualList entityImportStreamWithResidualList = new EntityImportStreamWithResidualList(entityStream, residualList);
while (entityImportStreamWithResidualList.hasNext()) {
AtlasEntityWithExtInfo entityWithExtInfo = entityImportStreamWithResidualList.getNextEntityWithExtInfo();
- AtlasEntity entity = entityWithExtInfo != null ? entityWithExtInfo.getEntity() : null;
- if (entity == null) {
- continue;
+ TypesUtil.Pair result = run(entityWithExtInfo, ret, importResult, processedGuids, entityStream.getPosition(), entityImportStreamWithResidualList.getStreamSize(), currentPercent, entityImportStreamWithResidualList.residualList);
+
+ ret = result.left;
+ currentPercent = result.right;
+ }
+
+ importResult.getProcessedEntities().addAll(processedGuids);
+
+ LOG.info("bulkImport(): done. Total number of entities (including referred entities) imported: {}", processedGuids.size());
+
+ return ret;
+ }
+
+ @Override
+ public TypesUtil.Pair run(AtlasEntityWithExtInfo entityWithExtInfo, EntityMutationResponse ret,
+ AtlasImportResult importResult, Set processedGuids, int entityStreamPosition,
+ int streamSize, float currentPercent, List residualList) throws AtlasBaseException {
+ if (ret == null) {
+ ret = new EntityMutationResponse();
+
+ ret.setGuidAssignments(new HashMap<>());
+ }
+
+ AtlasEntity entity = entityWithExtInfo != null ? entityWithExtInfo.getEntity() : null;
+
+ if (entity != null) {
+ if (processedGuids == null) {
+ processedGuids = new HashSet<>();
}
+ if (residualList == null) {
+ residualList = new ArrayList<>();
+ }
+
+ boolean isVertexGuidUpdated = false;
+
for (int attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
try {
AtlasEntityStreamForImport oneEntityStream = new AtlasEntityStreamForImport(entityWithExtInfo, null);
@@ -122,25 +153,39 @@ public EntityMutationResponse run(EntityImportStream entityStream, AtlasImportRe
ret.getGuidAssignments().putAll(resp.getGuidAssignments());
}
- currentPercent = updateImportMetrics(entityWithExtInfo, resp, importResult, processedGuids, entityStream.getPosition(), entityImportStreamWithResidualList.getStreamSize(), currentPercent);
+ currentPercent = updateImportMetrics(entityWithExtInfo, resp, importResult, processedGuids, entityStreamPosition, streamSize, currentPercent);
- entityStream.onImportComplete(entity.getGuid());
break;
} catch (AtlasBaseException e) {
if (!updateResidualList(e, residualList, entityWithExtInfo.getEntity().getGuid())) {
throw e;
}
+
break;
} catch (AtlasSchemaViolationException e) {
LOG.debug("Entity: {}", entity.getGuid(), e);
- if (attempt == 0) {
+ if (!isVertexGuidUpdated) {
updateVertexGuid(entityWithExtInfo);
+
+ isVertexGuidUpdated = true;
} else {
LOG.error("Guid update failed: {}", entityWithExtInfo.getEntity().getGuid());
+
throw e;
}
} catch (Throwable e) {
+ List throwableList = ExceptionUtils.getThrowableList(e);
+
+ if (!throwableList.isEmpty() && containsException(throwableList, EXCEPTION_CLASS_NAME_PERMANENT_LOCKING_EXCEPTION)) {
+ if (attempt < MAX_ATTEMPTS - 1) {
+ LOG.error("Caught {} , Retrying the transaction, attempt count is:{}", EXCEPTION_CLASS_NAME_PERMANENT_LOCKING_EXCEPTION, attempt);
+ continue;
+ } else {
+ throw e;
+ }
+ }
+
AtlasBaseException abe = new AtlasBaseException(e);
if (!updateResidualList(abe, residualList, entityWithExtInfo.getEntity().getGuid())) {
@@ -148,6 +193,7 @@ public EntityMutationResponse run(EntityImportStream entityStream, AtlasImportRe
}
LOG.warn("Exception: {}", entity.getGuid(), e);
+
break;
} finally {
RequestContext.get().clearCache();
@@ -155,23 +201,21 @@ public EntityMutationResponse run(EntityImportStream entityStream, AtlasImportRe
}
}
- importResult.getProcessedEntities().addAll(processedGuids);
-
- LOG.info("bulkImport(): done. Total number of entities (including referred entities) imported: {}", processedGuids.size());
+ return TypesUtil.Pair.of(ret, currentPercent);
+ }
- return ret;
+ private boolean containsException(final List exceptions, final String exceptionName) {
+ return exceptions.stream().anyMatch(o -> o.getClass().getSimpleName().equals(exceptionName));
}
@GraphTransaction
public void updateVertexGuid(AtlasEntityWithExtInfo entityWithExtInfo) {
updateVertexGuid(entityWithExtInfo.getEntity());
- if (MapUtils.isEmpty(entityWithExtInfo.getReferredEntities())) {
- return;
- }
-
- for (AtlasEntity entity : entityWithExtInfo.getReferredEntities().values()) {
- updateVertexGuid(entity);
+ if (MapUtils.isNotEmpty(entityWithExtInfo.getReferredEntities())) {
+ for (AtlasEntity entity : entityWithExtInfo.getReferredEntities().values()) {
+ updateVertexGuid(entity);
+ }
}
}
diff --git a/repository/src/test/java/org/apache/atlas/TestModules.java b/repository/src/test/java/org/apache/atlas/TestModules.java
index 7d3ce0fcff..60273bc33e 100644
--- a/repository/src/test/java/org/apache/atlas/TestModules.java
+++ b/repository/src/test/java/org/apache/atlas/TestModules.java
@@ -53,6 +53,7 @@
import org.apache.atlas.repository.ogm.glossary.AtlasGlossaryCategoryDTO;
import org.apache.atlas.repository.ogm.glossary.AtlasGlossaryDTO;
import org.apache.atlas.repository.ogm.glossary.AtlasGlossaryTermDTO;
+import org.apache.atlas.repository.ogm.impexp.AtlasAsyncImportRequestDTO;
import org.apache.atlas.repository.ogm.metrics.AtlasMetricsStatDTO;
import org.apache.atlas.repository.ogm.profiles.AtlasSavedSearchDTO;
import org.apache.atlas.repository.ogm.profiles.AtlasUserProfileDTO;
@@ -66,6 +67,7 @@
import org.apache.atlas.repository.store.graph.v2.BulkImporterImpl;
import org.apache.atlas.repository.store.graph.v2.EntityGraphMapper;
import org.apache.atlas.repository.store.graph.v2.IAtlasEntityChangeNotifier;
+import org.apache.atlas.repository.store.graph.v2.asyncimport.ImportTaskListener;
import org.apache.atlas.repository.store.graph.v2.tasks.ClassificationPropagateTaskFactory;
import org.apache.atlas.runner.LocalSolrRunner;
import org.apache.atlas.service.Service;
@@ -131,6 +133,7 @@ protected void configure() {
bind(ExportService.class).asEagerSingleton();
bind(SearchTracker.class).asEagerSingleton();
+ bind(ImportTaskListener.class).toInstance(Mockito.mock(ImportTaskListener.class));
bind(AtlasEntityStore.class).to(AtlasEntityStoreV2.class);
bind(AtlasRelationshipStore.class).to(AtlasRelationshipStoreV2.class);
@@ -164,6 +167,7 @@ protected void configure() {
availableDTOs.addBinding().to(ExportImportAuditEntryDTO.class);
availableDTOs.addBinding().to(AtlasAuditEntryDTO.class);
availableDTOs.addBinding().to(AtlasMetricsStatDTO.class);
+ availableDTOs.addBinding().to(AtlasAsyncImportRequestDTO.class);
bind(DTORegistry.class).asEagerSingleton();
bind(DataAccess.class).asEagerSingleton();
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/AsyncImportServiceTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/AsyncImportServiceTest.java
new file mode 100644
index 0000000000..50378b63e6
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/AsyncImportServiceTest.java
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.PList;
+import org.apache.atlas.model.impexp.AsyncImportStatus;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.repository.ogm.DataAccess;
+import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2;
+import org.mockito.Mock;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.PROCESSING;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.SUCCESSFUL;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.WAITING;
+import static org.apache.atlas.repository.Constants.PROPERTY_KEY_ASYNC_IMPORT_ID;
+import static org.apache.atlas.repository.Constants.PROPERTY_KEY_ASYNC_IMPORT_STATUS;
+import static org.apache.atlas.repository.ogm.impexp.AtlasAsyncImportRequestDTO.ASYNC_IMPORT_TYPE_NAME;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyList;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
+
+public class AsyncImportServiceTest {
+ private DataAccess dataAccess;
+ private AsyncImportService asyncImportService;
+
+ @Mock
+ private AtlasGraphUtilsV2 atlasGraphUtilsV2;
+
+ @BeforeMethod
+ public void setup() {
+ MockitoAnnotations.openMocks(this);
+
+ dataAccess = mock(DataAccess.class);
+ asyncImportService = new AsyncImportService(dataAccess);
+ }
+
+ @Test
+ public void testFetchImportRequestByImportId() throws Exception {
+ String importId = "import123";
+ AtlasAsyncImportRequest mockRequest = new AtlasAsyncImportRequest();
+
+ mockRequest.setImportId(importId);
+
+ when(dataAccess.load(any(AtlasAsyncImportRequest.class))).thenReturn(mockRequest);
+
+ AtlasAsyncImportRequest result = asyncImportService.fetchImportRequestByImportId(importId);
+
+ assertNotNull(result);
+ assertEquals(result.getImportId(), importId);
+ verify(dataAccess, times(1)).load(any(AtlasAsyncImportRequest.class));
+ }
+
+ @Test
+ public void testFetchImportRequestByImportIdError() throws AtlasBaseException {
+ String importId = "import123";
+
+ when(dataAccess.load(any(AtlasAsyncImportRequest.class))).thenThrow(new RuntimeException("Test Exception"));
+
+ AtlasAsyncImportRequest result = asyncImportService.fetchImportRequestByImportId(importId);
+
+ assertNull(result);
+ verify(dataAccess, times(1)).load(any(AtlasAsyncImportRequest.class));
+ }
+
+ @Test
+ public void testSaveImportRequest() throws AtlasBaseException {
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+
+ importRequest.setImportId("import123");
+
+ asyncImportService.saveImportRequest(importRequest);
+
+ verify(dataAccess, times(1)).save(importRequest);
+ }
+
+ @Test
+ public void testUpdateImportRequest() throws AtlasBaseException {
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+
+ importRequest.setImportId("import123");
+
+ doThrow(new AtlasBaseException("Save failed")).when(dataAccess).save(importRequest);
+
+ asyncImportService.updateImportRequest(importRequest);
+
+ verify(dataAccess, times(1)).save(importRequest);
+ }
+
+ @Test
+ public void testFetchInProgressImportIds() throws AtlasBaseException {
+ AtlasAsyncImportRequest request1 = new AtlasAsyncImportRequest();
+ AtlasAsyncImportRequest request2 = new AtlasAsyncImportRequest();
+
+ request1.setImportId("guid1");
+ request1.setStatus(PROCESSING);
+
+ request2.setImportId("guid2");
+ request2.setStatus(SUCCESSFUL);
+
+ try (MockedStatic mockedStatic = mockStatic(AtlasGraphUtilsV2.class)) {
+ mockedStatic.when(() -> AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(ASYNC_IMPORT_TYPE_NAME,
+ Collections.singletonMap(PROPERTY_KEY_ASYNC_IMPORT_STATUS, PROCESSING),
+ PROPERTY_KEY_ASYNC_IMPORT_ID)).thenReturn(Collections.singletonList("guid1"));
+
+ mockedStatic.when(() -> AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(ASYNC_IMPORT_TYPE_NAME,
+ Collections.singletonMap(PROPERTY_KEY_ASYNC_IMPORT_STATUS, SUCCESSFUL),
+ PROPERTY_KEY_ASYNC_IMPORT_ID)).thenReturn(Collections.singletonList("guid2"));
+
+ List result = asyncImportService.fetchInProgressImportIds();
+
+ assertEquals(result.size(), 1);
+ assertTrue(result.contains("guid1"));
+
+ mockedStatic.verify(() -> AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(anyString(), any(Map.class), anyString()));
+ }
+ }
+
+ @Test
+ public void testFetchQueuedImportRequests() throws AtlasBaseException {
+ AtlasAsyncImportRequest request1 = new AtlasAsyncImportRequest();
+ AtlasAsyncImportRequest request2 = new AtlasAsyncImportRequest();
+
+ request1.setImportId("guid1");
+ request1.setStatus(WAITING);
+
+ request2.setImportId("guid2");
+ request2.setStatus(PROCESSING);
+
+ try (MockedStatic mockStatic = mockStatic(AtlasGraphUtilsV2.class)) {
+ mockStatic.when(() -> AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(ASYNC_IMPORT_TYPE_NAME,
+ Collections.singletonMap(PROPERTY_KEY_ASYNC_IMPORT_STATUS, WAITING),
+ PROPERTY_KEY_ASYNC_IMPORT_ID)).thenReturn(Collections.singletonList("guid1"));
+
+ mockStatic.when(() -> AtlasGraphUtilsV2.findEntityPropertyValuesByTypeAndAttributes(ASYNC_IMPORT_TYPE_NAME,
+ Collections.singletonMap(PROPERTY_KEY_ASYNC_IMPORT_STATUS, PROCESSING),
+ PROPERTY_KEY_ASYNC_IMPORT_ID)).thenReturn(Collections.singletonList("guid2"));
+
+ List result = asyncImportService.fetchQueuedImportRequests();
+
+ assertEquals(result.size(), 1);
+ assertTrue(result.contains("guid1"));
+ }
+ }
+
+ @Test
+ public void testDeleteRequests() throws AtlasBaseException {
+ List guids = Arrays.asList("guid1", "guid2");
+
+ try (MockedStatic mockStatic = mockStatic(AtlasGraphUtilsV2.class)) {
+ mockStatic.when(() -> AtlasGraphUtilsV2.findEntityGUIDsByType(anyString(), any())).thenReturn(guids);
+
+ asyncImportService.deleteRequests();
+
+ verify(dataAccess, times(1)).delete(guids);
+ }
+ }
+
+ @Test
+ public void testGetAsyncImportsStatus() throws AtlasBaseException {
+ List guids = Arrays.asList("guid1", "guid2");
+ AtlasAsyncImportRequest request1 = spy(new AtlasAsyncImportRequest());
+ AtlasImportResult mockImportResult = mock(AtlasImportResult.class);
+
+ request1.setImportId("guid1");
+ request1.setStatus(AtlasAsyncImportRequest.ImportStatus.PROCESSING);
+ request1.setReceivedTime(System.currentTimeMillis());
+
+ doReturn("admin").when(mockImportResult).getUserName();
+ request1.setImportResult(mockImportResult);
+
+ int offset = 0;
+ int limit = 10;
+
+ try (MockedStatic mockStatic = mockStatic(AtlasGraphUtilsV2.class)) {
+ mockStatic.when(() -> AtlasGraphUtilsV2.findEntityGUIDsByType(anyString(), any())).thenReturn(guids);
+ when(dataAccess.load(anyList())).thenReturn(Collections.singletonList(request1));
+
+ PList result = asyncImportService.getAsyncImportsStatus(offset, limit);
+
+ assertEquals(result.getList().size(), 1);
+ assertEquals(result.getList().get(0).getImportId(), "guid1");
+ assertEquals(result.getList().get(0).getImportRequestUser(), "admin");
+
+ verify(dataAccess, times(1)).load(anyList());
+ }
+ }
+
+ @Test
+ public void testGetImportStatusById() throws AtlasBaseException {
+ String importId = "import123";
+ AtlasAsyncImportRequest request = new AtlasAsyncImportRequest();
+
+ request.setImportId(importId);
+
+ when(dataAccess.load(any(AtlasAsyncImportRequest.class))).thenReturn(request);
+
+ AtlasAsyncImportRequest result = asyncImportService.getAsyncImportRequest(importId);
+
+ assertNotNull(result);
+ assertEquals(result.getImportId(), importId);
+ verify(dataAccess, times(1)).load(any(AtlasAsyncImportRequest.class));
+ }
+
+ @AfterMethod
+ public void tearDown() {
+ Mockito.reset(dataAccess);
+ }
+}
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java
index 33f95608dc..dbf84dbde4 100644
--- a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java
@@ -24,24 +24,32 @@
import org.apache.atlas.TestUtilsV2;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
import org.apache.atlas.model.impexp.AtlasExportRequest;
+import org.apache.atlas.model.impexp.AtlasExportResult;
import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.instance.AtlasRelatedObjectId;
import org.apache.atlas.model.instance.AtlasRelationship;
import org.apache.atlas.model.instance.EntityMutationResponse;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.repository.AtlasTestBase;
import org.apache.atlas.repository.Constants;
+import org.apache.atlas.repository.audit.AtlasAuditService;
import org.apache.atlas.repository.graph.AtlasGraphProvider;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
+import org.apache.atlas.repository.store.graph.BulkImporter;
+import org.apache.atlas.repository.store.graph.v2.AsyncImportTaskExecutor;
import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream;
import org.apache.atlas.repository.store.graph.v2.AtlasGraphUtilsV2;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasClassificationType;
import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.commons.lang.StringUtils;
import org.mockito.stubbing.Answer;
import org.testng.ITestContext;
@@ -60,6 +68,9 @@
import java.util.List;
import java.util.Map;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.FAILED;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.PARTIAL_SUCCESS;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.PROCESSING;
import static org.apache.atlas.model.impexp.AtlasExportRequest.FETCH_TYPE_FULL;
import static org.apache.atlas.model.impexp.AtlasExportRequest.FETCH_TYPE_INCREMENTAL;
import static org.apache.atlas.model.impexp.AtlasExportRequest.OPTION_FETCH_TYPE;
@@ -73,13 +84,25 @@
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.runImportWithParameters;
import static org.apache.atlas.utils.TestLoadModelUtils.loadModelFromJson;
import static org.apache.atlas.utils.TestLoadModelUtils.loadModelFromResourcesJson;
+import static org.mockito.ArgumentMatchers.anyFloat;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyInt;
+import static org.mockito.Mockito.anyLong;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertThrows;
import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.fail;
@Guice(modules = TestModules.TestOnlyModule.class)
public class ImportServiceTest extends AtlasTestBase {
@@ -96,6 +119,8 @@ public class ImportServiceTest extends AtlasTestBase {
private EntityDiscoveryService discoveryService;
@Inject
private ExportImportAuditService auditService;
+ @Inject
+ private AtlasAuditService atlasAuditService;
@Inject
public ImportServiceTest(ImportService importService) {
@@ -396,11 +421,410 @@ public void importDB7(InputStream inputStream) throws AtlasBaseException, IOExce
assertEntityCount("hive_column", "6c4f196a-4046-493b-8c3a-2b1a9ef255a2", 1);
}
+ // Exercises ImportService.onImportTypeDef() across the data-provider scenarios:
+ // a normal import, a missing (null) import request, and a processTypes() failure.
+ @Test(dataProvider = "provideOnImportTypeDefScenarios")
+ public void testOnImportTypeDef(AtlasAsyncImportRequest importRequest, boolean shouldThrowException, boolean shouldFailProcessing) throws AtlasBaseException {
+ AtlasTypeDefStore typeDefStore = mock(AtlasTypeDefStore.class);
+ AtlasTypeRegistry typeRegistry = mock(AtlasTypeRegistry.class);
+ BulkImporter bulkImporter = mock(BulkImporter.class);
+ AuditsWriter auditsWriter = mock(AuditsWriter.class);
+ ImportTransformsShaper importTransformsShaper = mock(ImportTransformsShaper.class);
+ TableReplicationRequestProcessor tableReplicationRequestProcessor = mock(TableReplicationRequestProcessor.class);
+ AsyncImportTaskExecutor asyncImportTaskPublisher = mock(AsyncImportTaskExecutor.class);
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ AtlasAuditService auditService = mock(AtlasAuditService.class);
+
+ // Create ImportService instance and spy on it to override processTypes
+ ImportService importService = new ImportService(typeDefStore, typeRegistry, bulkImporter, auditsWriter,
+ importTransformsShaper, tableReplicationRequestProcessor, asyncImportTaskPublisher, asyncImportService, auditService);
+
+ ImportService spyImportService = spy(importService);
+
+ String importId = "test-import-id";
+ AtlasTypesDef typesDef = new AtlasTypesDef();
+
+ // Mock asyncImportService.fetchImportRequestByImportId()
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+
+ // Mock processTypes() for happy path
+ if (!shouldFailProcessing) {
+ doNothing().when(spyImportService).processTypes(any(), any());
+ } else {
+ // Mock processTypes() to throw an exception in failure cases
+ doThrow(new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS))
+ .when(spyImportService).processTypes(any(), any());
+ }
+
+ if (shouldThrowException) {
+ assertThrows(AtlasBaseException.class, () -> spyImportService.onImportTypeDef(typesDef, importId));
+ if (importRequest != null) {
+ // Failure path must flag the request as FAILED.
+ // NOTE(review): argument order here is (expected, actual) while the rest of this
+ // file uses TestNG's (actual, expected); equality still holds but failure
+ // messages will be inverted — consider aligning.
+ assertEquals(FAILED, importRequest.getStatus());
+ }
+ } else {
+ try {
+ spyImportService.onImportTypeDef(typesDef, importId); // No exception expected
+ } catch (Exception e) {
+ fail("Unexpected exception thrown: " + e.getMessage());
+ }
+
+ // Verify updateImportRequest() is called
+ verify(asyncImportService).updateImportRequest(importRequest);
+ }
+ }
+
+ // Scenarios for testOnImportTypeDef: {importRequest, shouldThrowException, shouldFailProcessing}.
+ @DataProvider(name = "provideOnImportTypeDefScenarios")
+ public Object[][] provideOnImportTypeDefScenarios() {
+ // NOTE(review): the same 'validRequest' instance is shared by the first and third
+ // scenarios, so a status set to FAILED in one run remains visible in the other —
+ // confirm this cross-scenario coupling is intentional (fresh instances per row
+ // would isolate the cases).
+ AtlasAsyncImportRequest validRequest = new AtlasAsyncImportRequest(new AtlasImportResult());
+ return new Object[][] {
+ {validRequest, false, false}, // Normal case - should not throw exception
+ {null, true, false}, // Import request is null - should throw exception
+ {validRequest, true, true} // Processing failure - should throw exception and set FAILED
+ };
+ }
+
+ // onImportEntity() must raise AtlasBaseException when no async import request
+ // exists for the given importId (fetch returns null).
+ @Test
+ public void testOnImportEntityWhenImportRequestIsNullShouldThrowException() throws AtlasBaseException {
+ String importId = "test-import-id";
+ int position = 1;
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = mock(AtlasEntity.AtlasEntityWithExtInfo.class);
+
+ // No request known for this importId.
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(null);
+
+ ImportService importService = new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ mock(BulkImporter.class),
+ mock(AuditsWriter.class),
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class));
+
+ assertThrows(AtlasBaseException.class, () -> importService.onImportEntity(entityWithExtInfo, importId, position));
+ }
+
+ // When bulkImporter.asyncImport() throws and the stream has not reached the end
+ // (imported + failed < published), onImportEntity() must record the failure and
+ // return false, leaving the request in PROCESSING.
+ @Test
+ public void testOnImportEntityWhenProcessingFailsAndDidNotReachEndShouldReturnFalse() throws AtlasBaseException {
+ String importId = "test-import-id";
+ int position = 1;
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = mock(AtlasEntity.AtlasEntityWithExtInfo.class);
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+ when(entityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ BulkImporter bulkImporter = mock(BulkImporter.class);
+
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+ AtlasAsyncImportRequest.ImportDetails importDetails = new AtlasAsyncImportRequest.ImportDetails();
+
+ // 3 imported + 5 failed of 10 published: this failure makes 9 < 10, so not complete.
+ importDetails.setImportedEntitiesCount(3);
+ importDetails.setFailedEntitiesCount(5);
+ importDetails.setPublishedEntityCount(10);
+
+ importResult.setProcessedEntities(new ArrayList<>());
+ importRequest.setImportResult(importResult);
+ importRequest.setImportDetails(importDetails);
+ importRequest.setStatus(PROCESSING);
+
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+ // Simulate entity-level import failure.
+ doThrow(new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS))
+ .when(bulkImporter).asyncImport(any(), any(), any(), any(), any(), anyInt(), anyInt(), anyFloat());
+
+ ImportService importService = new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ bulkImporter,
+ mock(AuditsWriter.class),
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class));
+ boolean result = importService.onImportEntity(entityWithExtInfo, importId, position);
+
+ assertFalse(result);
+ assertEquals(importRequest.getStatus(), PROCESSING);
+ // Imported count unchanged; failed count incremented and the guid + failure recorded.
+ assertEquals(importRequest.getImportDetails().getImportedEntitiesCount(), 3);
+ assertEquals(importRequest.getImportDetails().getFailedEntitiesCount(), 6);
+ assertTrue(importRequest.getImportDetails().getFailedEntities().contains("entity-guid"));
+ assertTrue(importRequest.getImportDetails().getFailures().containsKey("entity-guid"));
+ }
+
+ // When an entity imports successfully but the stream has not reached the end
+ // (imported + failed < published), onImportEntity() must bump the imported count
+ // and return false, keeping the request in PROCESSING.
+ @Test
+ public void testOnImportEntityWhenProcessingSucceedsButDidNotReachEndShouldReturnFalse() throws AtlasBaseException {
+ String importId = "test-import-id";
+ int position = 1;
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = mock(AtlasEntity.AtlasEntityWithExtInfo.class);
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+ when(entityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ EntityMutationResponse mockEntityMutationResponse = mock(EntityMutationResponse.class);
+ float mockProgress = 75.0f; // Simulated new progress value
+ TypesUtil.Pair mockResponse = TypesUtil.Pair.of(mockEntityMutationResponse, mockProgress);
+
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ BulkImporter bulkImporter = mock(BulkImporter.class);
+
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+ AtlasAsyncImportRequest.ImportDetails importDetails = new AtlasAsyncImportRequest.ImportDetails();
+
+ // 3 imported + 5 failed of 10 published: this success makes 9 < 10, so not complete.
+ importDetails.setImportedEntitiesCount(3);
+ importDetails.setFailedEntitiesCount(5);
+ importDetails.setPublishedEntityCount(10);
+
+ importResult.setProcessedEntities(new ArrayList<>());
+ importRequest.setImportId(importId);
+ importRequest.setImportResult(importResult);
+ importRequest.setImportDetails(importDetails);
+ importRequest.setStatus(PROCESSING);
+
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+ // Successful entity import returning a (mutationResponse, progress) pair.
+ when(bulkImporter.asyncImport(any(), any(), any(), any(), any(), anyInt(), anyInt(), anyFloat()))
+ .thenReturn(mockResponse);
+
+ ImportService importService = new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ bulkImporter,
+ mock(AuditsWriter.class),
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class));
+
+ boolean result = importService.onImportEntity(entityWithExtInfo, importId, position);
+
+ assertFalse(result);
+ assertEquals(importRequest.getImportDetails().getImportedEntitiesCount(), 4);
+ assertEquals(importRequest.getImportDetails().getFailedEntitiesCount(), 5);
+ assertEquals(importRequest.getStatus(), PROCESSING);
+ }
+
+ // When the final entity is processed (imported + failed == published) and some
+ // entities failed, onImportEntity() must complete the import with PARTIAL_SUCCESS
+ // and run the completion hooks (replication deletion, operation audits).
+ @Test
+ public void testOnImportEntityWhenProcessingReachesEndStatusIsPartialSuccessIfFailedEntityCountIsGreaterThanZero() throws AtlasBaseException {
+ String importId = "test-import-id";
+ int position = 1;
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = mock(AtlasEntity.AtlasEntityWithExtInfo.class);
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+ when(entityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ EntityMutationResponse mockEntityMutationResponse = mock(EntityMutationResponse.class);
+ float mockProgress = 75.0f; // Simulated new progress value
+ TypesUtil.Pair mockResponse = TypesUtil.Pair.of(mockEntityMutationResponse, mockProgress);
+
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ AuditsWriter auditsWriter = mock(AuditsWriter.class);
+ BulkImporter bulkImporter = mock(BulkImporter.class);
+
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+ AtlasAsyncImportRequest.ImportDetails importDetails = new AtlasAsyncImportRequest.ImportDetails();
+ AtlasExportResult exportResult = new AtlasExportResult();
+
+ // Export/import requests must be non-null for the completion path.
+ exportResult.setRequest(new AtlasExportRequest());
+ importResult.setExportResult(exportResult);
+ importResult.setRequest(new AtlasImportRequest());
+ // 5 imported + 4 failed of 10 published: this success makes 10 == 10, i.e. the end.
+ importDetails.setImportedEntitiesCount(5);
+ importDetails.setFailedEntitiesCount(4);
+ importDetails.setPublishedEntityCount(10);
+
+ importResult.setProcessedEntities(new ArrayList<>());
+ importRequest.setImportId(importId);
+ importRequest.setImportResult(importResult);
+ importRequest.setImportDetails(importDetails);
+ importRequest.setStatus(PROCESSING);
+
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+ when(bulkImporter.asyncImport(any(), any(), any(), any(), any(), anyInt(), anyInt(), anyFloat()))
+ .thenReturn(mockResponse);
+
+ // Spy so the heavyweight completion side effects can be stubbed out.
+ ImportService spyImportService = spy(new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ bulkImporter,
+ auditsWriter,
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class)));
+ doNothing().when(spyImportService).processReplicationDeletion(any(), any());
+ doNothing().when(auditsWriter).write(anyString(), any(AtlasImportResult.class), anyLong(), anyLong(), any());
+ doNothing().when(spyImportService).addToImportOperationAudits(any());
+
+ boolean result = spyImportService.onImportEntity(entityWithExtInfo, importId, position);
+
+ assertTrue(result);
+ assertEquals(importRequest.getImportDetails().getImportedEntitiesCount(), 6);
+ assertEquals(importRequest.getImportDetails().getFailedEntitiesCount(), 4);
+ // failed > 0 and imported > 0 => PARTIAL_SUCCESS on both request and result.
+ assertEquals(importRequest.getStatus(), PARTIAL_SUCCESS);
+ assertEquals(importRequest.getImportResult().getOperationStatus(), AtlasImportResult.OperationStatus.PARTIAL_SUCCESS);
+
+ verify(spyImportService, times(1)).processReplicationDeletion(any(), any());
+ verify(spyImportService, times(1)).addToImportOperationAudits(any());
+ }
+
+ // onImportComplete() must finalize a request that has both imported and failed
+ // entities as PARTIAL_SUCCESS, mirrored onto the AtlasImportResult.
+ @Test
+ public void testOnImportCompleteWhenProcessingReachesEndStatusIsPartialSuccessIfFailedEntityCountIsGreaterThanZero() throws AtlasBaseException {
+ String importId = "test-import-id";
+
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+ AtlasAsyncImportRequest.ImportDetails importDetails = new AtlasAsyncImportRequest.ImportDetails();
+ AtlasExportResult exportResult = new AtlasExportResult();
+
+ // 2 imported + 3 failed of 5 published: finished, with failures present.
+ importDetails.setImportedEntitiesCount(2);
+ importDetails.setFailedEntitiesCount(3);
+ importDetails.setPublishedEntityCount(5);
+
+ importResult.setRequest(new AtlasImportRequest());
+ importResult.setExportResult(exportResult);
+ importRequest.setImportId(importId);
+ importRequest.setImportDetails(importDetails);
+ importRequest.setImportResult(importResult);
+ importRequest.setStatus(PROCESSING);
+
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ AuditsWriter auditsWriter = mock(AuditsWriter.class);
+
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+
+ ImportService importService = new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ mock(BulkImporter.class),
+ auditsWriter,
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class));
+
+ importService.onImportComplete(importId);
+
+ assertEquals(importRequest.getStatus(), PARTIAL_SUCCESS);
+ assertEquals(importResult.getOperationStatus(), AtlasImportResult.OperationStatus.PARTIAL_SUCCESS);
+ }
+
+ // When the final entity fails (imported + failed == published) and nothing was
+ // imported at all, onImportEntity() must complete the import with FAILED while
+ // still running the completion hooks.
+ @Test
+ public void testOnImportEntityWhenProcessingReachesEndStatusIsFailureIfImportedEntityCountIsZero() throws AtlasBaseException {
+ String importId = "test-import-id";
+ int position = 1;
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = mock(AtlasEntity.AtlasEntityWithExtInfo.class);
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+ when(entityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ EntityMutationResponse mockEntityMutationResponse = mock(EntityMutationResponse.class);
+ float mockProgress = 75.0f; // Simulated new progress value
+ TypesUtil.Pair mockResponse = TypesUtil.Pair.of(mockEntityMutationResponse, mockProgress);
+
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ AuditsWriter auditsWriter = mock(AuditsWriter.class);
+ BulkImporter bulkImporter = mock(BulkImporter.class);
+
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+ AtlasAsyncImportRequest.ImportDetails importDetails = new AtlasAsyncImportRequest.ImportDetails();
+ AtlasExportResult exportResult = new AtlasExportResult();
+
+ exportResult.setRequest(new AtlasExportRequest());
+ importResult.setExportResult(exportResult);
+ importResult.setRequest(new AtlasImportRequest());
+ // 0 imported + 9 failed of 10 published: the failure below makes 10 == 10.
+ importDetails.setImportedEntitiesCount(0);
+ importDetails.setFailedEntitiesCount(9);
+ importDetails.setPublishedEntityCount(10);
+ importDetails.setTotalEntitiesCount(10);
+
+ importResult.setProcessedEntities(new ArrayList<>());
+ importRequest.setImportId(importId);
+ importRequest.setImportResult(importResult);
+ importRequest.setImportDetails(importDetails);
+ importRequest.setStatus(PROCESSING);
+
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+ // The last entity also fails, driving imported-entity count to stay at zero.
+ when(bulkImporter.asyncImport(any(), any(), any(), any(), any(), anyInt(), anyInt(), anyFloat()))
+ .thenThrow(new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS));
+ ImportService spyImportService = spy(new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ bulkImporter,
+ auditsWriter,
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class)));
+
+ doNothing().when(spyImportService).processReplicationDeletion(any(), any());
+ doNothing().when(auditsWriter).write(anyString(), any(AtlasImportResult.class), anyLong(), anyLong(), any());
+ doNothing().when(spyImportService).addToImportOperationAudits(any());
+
+ boolean result = spyImportService.onImportEntity(entityWithExtInfo, importId, position);
+
+ assertTrue(result);
+ assertEquals(importRequest.getImportDetails().getImportedEntitiesCount(), 0);
+ assertEquals(importRequest.getImportDetails().getFailedEntitiesCount(), 10);
+ // imported == 0 => overall FAILED.
+ assertEquals(importRequest.getStatus(), FAILED);
+
+ verify(spyImportService, times(1)).processReplicationDeletion(any(), any());
+ verify(spyImportService, times(1)).addToImportOperationAudits(any());
+ }
+
+ // onImportComplete() must finalize a request where every published entity failed
+ // (imported count == 0) as FAILED, with OperationStatus.FAIL on the result.
+ @Test
+ public void testOnImportCompleteWhenProcessingReachesEndStatusIsFailureIfImportedEntityCountIsZero() throws AtlasBaseException {
+ String importId = "test-import-id";
+
+ AtlasAsyncImportRequest importRequest = new AtlasAsyncImportRequest();
+ AtlasImportResult importResult = new AtlasImportResult();
+ AtlasAsyncImportRequest.ImportDetails importDetails = new AtlasAsyncImportRequest.ImportDetails();
+ AtlasExportResult exportResult = new AtlasExportResult();
+
+ // 0 imported + 5 failed of 5 published: finished with nothing imported.
+ importDetails.setImportedEntitiesCount(0);
+ importDetails.setFailedEntitiesCount(5);
+ importDetails.setPublishedEntityCount(5);
+ importDetails.setTotalEntitiesCount(5);
+
+ importResult.setRequest(new AtlasImportRequest());
+ importResult.setExportResult(exportResult);
+ importRequest.setImportId(importId);
+ importRequest.setImportDetails(importDetails);
+ importRequest.setImportResult(importResult);
+ importRequest.setStatus(PROCESSING);
+
+ AsyncImportService asyncImportService = mock(AsyncImportService.class);
+ AuditsWriter auditsWriter = mock(AuditsWriter.class);
+
+ when(asyncImportService.fetchImportRequestByImportId(importId)).thenReturn(importRequest);
+
+ ImportService importService = new ImportService(
+ mock(AtlasTypeDefStore.class),
+ mock(AtlasTypeRegistry.class),
+ mock(BulkImporter.class),
+ auditsWriter,
+ mock(ImportTransformsShaper.class),
+ mock(TableReplicationRequestProcessor.class),
+ mock(AsyncImportTaskExecutor.class),
+ asyncImportService,
+ mock(AtlasAuditService.class));
+
+ importService.onImportComplete(importId);
+
+ assertEquals(importRequest.getStatus(), FAILED);
+ assertEquals(importResult.getOperationStatus(), AtlasImportResult.OperationStatus.FAIL);
+ }
+
@Test
public void importServiceProcessesIOException() {
- ImportService importService = new ImportService(typeDefStore, typeRegistry, null, null, null, null);
+ ImportService importService = new ImportService(typeDefStore, typeRegistry, null, null, null, null, null, null, atlasAuditService);
AtlasImportRequest req = mock(AtlasImportRequest.class);
-
Answer answer = invocationOnMock -> {
throw new IOException("file is read only");
};
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v2/AsyncImportTaskExecutorTest.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v2/AsyncImportTaskExecutorTest.java
new file mode 100644
index 0000000000..6c731d0162
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/store/graph/v2/AsyncImportTaskExecutorTest.java
@@ -0,0 +1,466 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.store.graph.v2;
+
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.kafka.NotificationProvider;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.model.notification.MessageSource;
+import org.apache.atlas.notification.NotificationException;
+import org.apache.atlas.notification.NotificationInterface;
+import org.apache.atlas.repository.impexp.AsyncImportService;
+import org.apache.atlas.repository.store.graph.v2.asyncimport.ImportTaskListener;
+import org.mockito.Mock;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Collections;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.anyList;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertSame;
+import static org.testng.Assert.expectThrows;
+
+public class AsyncImportTaskExecutorTest {
+ // Collaborators of AsyncImportTaskExecutor, injected as Mockito mocks via openMocks().
+ @Mock
+ private AsyncImportService importService;
+
+ // Kafka-backed notification interface; supplied via the mocked NotificationProvider in setup().
+ @Mock
+ private NotificationInterface notificationInterface;
+
+ @Mock
+ private ImportTaskListener importTaskListener;
+
+ @Mock
+ private MessageSource messageSource;
+
+ // Unit under test; constructed fresh for every test method in setup().
+ private AsyncImportTaskExecutor asyncImportTaskExecutor;
+
+ // Builds the executor under test with NotificationProvider.get() statically mocked
+ // so the constructor receives the mock NotificationInterface. The MockedStatic is
+ // scoped to this try block only — presumably the provider is consulted solely at
+ // construction time; confirm against AsyncImportTaskExecutor's constructor.
+ @BeforeMethod
+ public void setup() {
+ MockitoAnnotations.openMocks(this);
+
+ // NOTE(review): raw MockedStatic — consider MockedStatic<NotificationProvider>.
+ try (MockedStatic mockedStatic = Mockito.mockStatic(NotificationProvider.class)) {
+ mockedStatic.when(NotificationProvider::get).thenReturn(notificationInterface);
+
+ when(messageSource.getSource()).thenReturn("AsyncImportTaskPublisher");
+
+ asyncImportTaskExecutor = new AsyncImportTaskExecutor(importService, importTaskListener);
+ }
+ }
+
+ // run() with a brand-new import (no existing request for the stream's md5) must
+ // save a new request in STAGING and close the entity stream.
+ @Test
+ void testRunSuccess() throws AtlasBaseException {
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockEntityImportStream.getMd5Hash()).thenReturn("import-md5-hash");
+ when(mockEntityImportStream.size()).thenReturn(5);
+ when(mockEntityImportStream.getCreationOrder()).thenReturn(Collections.emptyList());
+ // Empty stream: nothing to publish after staging.
+ when(mockEntityImportStream.hasNext()).thenReturn(false);
+
+ // No prior request for this import id => a fresh one is created.
+ when(importService.fetchImportRequestByImportId("import-md5-hash")).thenReturn(null);
+ doNothing().when(importService).saveImportRequest(any(AtlasAsyncImportRequest.class));
+
+ AtlasAsyncImportRequest result = asyncImportTaskExecutor.run(mockResult, mockEntityImportStream);
+
+ assertNotNull(result);
+ assertSame(result.getStatus(), AtlasAsyncImportRequest.ImportStatus.STAGING);
+ verify(mockEntityImportStream).close();
+ verify(importService).saveImportRequest(any(AtlasAsyncImportRequest.class));
+ }
+
+ // run() for an import id that already has a WAITING request must return the
+ // existing request untouched (no save/update) and close the stream.
+ @Test
+ void testRunDuplicateRequestInWaitingStatus() throws AtlasBaseException {
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockEntityImportStream.getMd5Hash()).thenReturn("import-md5");
+ when(mockEntityImportStream.size()).thenReturn(10);
+ when(mockEntityImportStream.getCreationOrder()).thenReturn(Collections.emptyList());
+
+ // A duplicate request already exists in WAITING state.
+ AtlasAsyncImportRequest mockRequest = mock(AtlasAsyncImportRequest.class);
+ when(mockRequest.getStatus()).thenReturn(AtlasAsyncImportRequest.ImportStatus.WAITING);
+ when(importService.fetchImportRequestByImportId("import-md5")).thenReturn(mockRequest);
+
+ doNothing().when(mockEntityImportStream).close();
+
+ AtlasAsyncImportRequest result = asyncImportTaskExecutor.run(mockResult, mockEntityImportStream);
+
+ assertNotNull(result);
+ assertSame(result.getStatus(), AtlasAsyncImportRequest.ImportStatus.WAITING);
+ verify(mockEntityImportStream).close();
+ verify(importService, never()).saveImportRequest(any());
+ verify(importService, never()).updateImportRequest(any());
+
+ // Verify that skipToPosition and publishImportRequest are NOT called
+ // NOTE(review): this spy is created AFTER run() already executed on the original
+ // object, so the never() verifications below are vacuously true and prove
+ // nothing — the spy would need to be created (and run() invoked on it) first.
+ AsyncImportTaskExecutor spyPublisher = spy(asyncImportTaskExecutor);
+
+ doNothing().when(spyPublisher).skipToStartEntityPosition(any(), any());
+ doNothing().when(spyPublisher).publishImportRequest(any(), any());
+
+ verify(spyPublisher, never()).skipToStartEntityPosition(mockRequest, mockEntityImportStream);
+ verify(spyPublisher, never()).publishImportRequest(mockRequest, mockEntityImportStream);
+ }
+
+ // run() for an import id that already has a PROCESSING request must return the
+ // existing request untouched (no save/update) and close the stream.
+ @Test
+ void testRunDuplicateRequestInProcessingStatus() throws AtlasBaseException {
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockEntityImportStream.getMd5Hash()).thenReturn("import-md5");
+ when(mockEntityImportStream.size()).thenReturn(10);
+ when(mockEntityImportStream.getCreationOrder()).thenReturn(Collections.emptyList());
+
+ // A duplicate request already exists in PROCESSING state.
+ AtlasAsyncImportRequest mockRequest = mock(AtlasAsyncImportRequest.class);
+
+ when(mockRequest.getStatus()).thenReturn(AtlasAsyncImportRequest.ImportStatus.PROCESSING);
+ when(importService.fetchImportRequestByImportId("import-md5")).thenReturn(mockRequest);
+
+ doNothing().when(mockEntityImportStream).close();
+
+ AtlasAsyncImportRequest result = asyncImportTaskExecutor.run(mockResult, mockEntityImportStream);
+
+ assertNotNull(result);
+ assertSame(result.getStatus(), AtlasAsyncImportRequest.ImportStatus.PROCESSING);
+ verify(mockEntityImportStream).close();
+ verify(importService, never()).saveImportRequest(any());
+ verify(importService, never()).updateImportRequest(any());
+
+ // Verify that skipToPosition and publishImportRequest are NOT called
+ // NOTE(review): as in the WAITING-status test above, this spy is created after
+ // run() has finished, so these never() verifications are vacuously true.
+ AsyncImportTaskExecutor spyPublisher = spy(asyncImportTaskExecutor);
+
+ doNothing().when(spyPublisher).skipToStartEntityPosition(any(), any());
+ doNothing().when(spyPublisher).publishImportRequest(any(), any());
+
+ verify(spyPublisher, never()).skipToStartEntityPosition(mockRequest, mockEntityImportStream);
+ verify(spyPublisher, never()).publishImportRequest(mockRequest, mockEntityImportStream);
+ }
+
+ // publishImportRequest() happy path: the request is persisted, the topic producer
+ // is closed, and the import-task listener is notified.
+ @Test
+ void testPublishImportRequestHappyPath() throws AtlasBaseException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getTopicName()).thenReturn("test-topic");
+ when(mockImportRequest.getImportId()).thenReturn("import-md5");
+ when(mockImportRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ when(mockImportRequest.getImportResult()).thenReturn(mockResult);
+ when(mockResult.getUserName()).thenReturn("test-user-1");
+ // No type definitions to publish in this scenario.
+ when(mockEntityImportStream.getTypesDef()).thenReturn(null);
+
+ asyncImportTaskExecutor.publishImportRequest(mockImportRequest, mockEntityImportStream);
+
+ verify(importService).updateImportRequest(mockImportRequest);
+ verify(notificationInterface).closeProducer(NotificationInterface.NotificationType.ASYNC_IMPORT, "test-topic");
+ verify(importTaskListener).onReceiveImportRequest(mockImportRequest);
+ }
+
+ // When sending the type-def notification fails, publishImportRequest() must not
+ // notify the listener, but must still close the topic producer.
+ @Test
+ void testPublishImportRequestTypeDefNotificationException() throws AtlasBaseException, NotificationException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getTopicName()).thenReturn("test-topic");
+ when(mockImportRequest.getImportId()).thenReturn("import-md5");
+ when(mockImportRequest.getImportResult()).thenReturn(mockResult);
+ when(mockResult.getUserName()).thenReturn("test-user-1");
+ when(mockEntityImportStream.getTypesDef()).thenReturn(null);
+
+ doThrow(new NotificationException(new Exception("some notification exception")))
+ .when(notificationInterface)
+ .send(eq("test-topic"), anyList(), any());
+
+ // NOTE(review): the expected exception is silently swallowed here, so the test
+ // passes even if no exception is thrown — prefer expectThrows(AtlasBaseException.class, ...)
+ // to assert the failure is actually propagated.
+ try {
+ asyncImportTaskExecutor.publishImportRequest(mockImportRequest, mockEntityImportStream);
+ } catch (AtlasBaseException ignored) {
+ // Ignored for this test
+ }
+
+ verify(importTaskListener, never()).onReceiveImportRequest(any(AtlasAsyncImportRequest.class));
+ verify(notificationInterface).closeProducer(NotificationInterface.NotificationType.ASYNC_IMPORT, "test-topic");
+ }
+
+ // publishEntityNotification() happy path: one entity is sent to the topic, the
+ // stream is told the entity completed, the request is persisted, and the tracking
+ // info / published-count are advanced.
+ @Test
+ void testPublishEntityNotificationHappyPath() throws NotificationException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+ AtlasEntityWithExtInfo mockEntityWithExtInfo = mock(AtlasEntityWithExtInfo.class);
+
+ when(mockImportRequest.getTopicName()).thenReturn("test-topic");
+ when(mockImportRequest.getImportId()).thenReturn("import-id");
+ // Real (non-mock) details/tracking objects so mutations made by the executor can be asserted.
+ when(mockImportRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo());
+ when(mockImportRequest.getImportResult()).thenReturn(mockResult);
+ when(mockResult.getUserName()).thenReturn("test-user-1");
+ when(mockEntityImportStream.hasNext()).thenReturn(true, false); // One entity in the stream
+ when(mockEntityImportStream.getNextEntityWithExtInfo()).thenReturn(mockEntityWithExtInfo);
+ when(mockEntityImportStream.getPosition()).thenReturn(1);
+
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+
+ when(mockEntityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ asyncImportTaskExecutor.publishEntityNotification(mockImportRequest, mockEntityImportStream);
+
+ verify(notificationInterface).send(eq("test-topic"), anyList(), any());
+ verify(mockEntityImportStream).onImportComplete("entity-guid");
+ verify(importService).updateImportRequest(mockImportRequest);
+ assertEquals(mockImportRequest.getImportTrackingInfo().getStartEntityPosition(), 1);
+ assertEquals(mockImportRequest.getImportDetails().getPublishedEntityCount(), 1);
+ }
+
+ // A null entity from the stream must be skipped: nothing is sent or completed,
+ // but the request is still updated and the position advances without counting a
+ // published entity.
+ @Test
+ void testPublishEntityNotificationNullEntity() throws NotificationException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo());
+ when(mockEntityImportStream.hasNext()).thenReturn(true, false); // One entity in the stream
+ // The single stream element resolves to null.
+ when(mockEntityImportStream.getNextEntityWithExtInfo()).thenReturn(null);
+ when(mockEntityImportStream.getPosition()).thenReturn(1);
+
+ asyncImportTaskExecutor.publishEntityNotification(mockImportRequest, mockEntityImportStream);
+
+ verify(notificationInterface, never()).send(anyString(), anyList(), any());
+ verify(mockEntityImportStream, never()).onImportComplete(anyString());
+ verify(importService).updateImportRequest(mockImportRequest);
+ assertEquals(mockImportRequest.getImportTrackingInfo().getStartEntityPosition(), 1);
+ assertEquals(mockImportRequest.getImportDetails().getPublishedEntityCount(), 0);
+ }
+
+ @Test
+ void testPublishEntityNotificationExceptionInSendToTopic() throws NotificationException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+ AtlasEntityWithExtInfo mockEntityWithExtInfo = mock(AtlasEntityWithExtInfo.class);
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+
+ when(mockImportRequest.getTopicName()).thenReturn("test-topic");
+ when(mockImportRequest.getImportId()).thenReturn("import-id");
+ when(mockImportRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo());
+ when(mockImportRequest.getImportResult()).thenReturn(mockResult);
+ when(mockResult.getUserName()).thenReturn("test-user-1");
+ when(mockEntityImportStream.getPosition()).thenReturn(1);
+ when(mockEntityImportStream.hasNext()).thenReturn(true, false);
+ when(mockEntityImportStream.getNextEntityWithExtInfo()).thenReturn(mockEntityWithExtInfo);
+ when(mockEntityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ doThrow(new NotificationException(new Exception("Error in sendToTopic")))
+ .when(notificationInterface)
+ .send(eq("test-topic"), anyList(), any());
+
+ asyncImportTaskExecutor.publishEntityNotification(mockImportRequest, mockEntityImportStream);
+
+ verify(notificationInterface).send(eq("test-topic"), anyList(), any());
+ verify(mockEntityImportStream, never()).onImportComplete("entity-guid");
+ verify(importService).updateImportRequest(mockImportRequest);
+ assertEquals(mockImportRequest.getImportTrackingInfo().getStartEntityPosition(), 1);
+ assertEquals(mockImportRequest.getImportDetails().getFailedEntitiesCount(), 1);
+ assertEquals(mockImportRequest.getImportDetails().getPublishedEntityCount(), 0);
+ }
+
+ @Test
+ void testPublishEntityNotificationIgnoreFailedEntityAndProcessNext() throws NotificationException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+ AtlasEntityWithExtInfo mockEntityWithExtInfo = mock(AtlasEntityWithExtInfo.class);
+ AtlasEntity mockEntity = mock(AtlasEntity.class);
+
+ when(mockImportRequest.getTopicName()).thenReturn("test-topic");
+ when(mockImportRequest.getImportId()).thenReturn("import-id");
+ when(mockImportRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo());
+ when(mockImportRequest.getImportResult()).thenReturn(mockResult);
+ when(mockResult.getUserName()).thenReturn("test-user-1");
+ when(mockEntityImportStream.getPosition()).thenReturn(1, 2);
+ when(mockEntityImportStream.hasNext()).thenReturn(true, true, false); // Two entities
+ when(mockEntityImportStream.getNextEntityWithExtInfo()).thenReturn(mockEntityWithExtInfo);
+ when(mockEntityWithExtInfo.getEntity()).thenReturn(mockEntity);
+ when(mockEntity.getGuid()).thenReturn("entity-guid");
+
+ doThrow(new NotificationException(new Exception("Error in sendToTopic")))
+ .doNothing()
+ .when(notificationInterface)
+ .send(eq("test-topic"), anyList(), any());
+
+ asyncImportTaskExecutor.publishEntityNotification(mockImportRequest, mockEntityImportStream);
+
+ verify(notificationInterface, times(2)).send(eq("test-topic"), anyList(), any());
+ verify(mockEntityImportStream, times(1)).onImportComplete("entity-guid");
+ verify(importService, times(2)).updateImportRequest(mockImportRequest);
+ assertEquals(mockImportRequest.getImportDetails().getPublishedEntityCount(), 1);
+ assertEquals(mockImportRequest.getImportDetails().getFailedEntitiesCount(), 1);
+ }
+
+ @Test
+ void testSkipToPositionHappyPath() {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo("", 3)); // Skip to position 3
+ when(mockEntityImportStream.hasNext()).thenReturn(true, true, true, true, false); // 4 entities in total
+ when(mockEntityImportStream.getPosition()).thenReturn(0, 1, 2, 3);
+
+ asyncImportTaskExecutor.skipToStartEntityPosition(mockImportRequest, mockEntityImportStream);
+
+ verify(mockEntityImportStream, times(3)).next(); // Skip 3 entities
+ }
+
+ @Test
+ void testSkipToPositionSkipToGreaterThanTotalEntities() {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo("", 10)); // Skip to position 10
+ when(mockEntityImportStream.hasNext()).thenReturn(true, true, true, false); // 3 entities in total
+ when(mockEntityImportStream.getPosition()).thenReturn(0, 1, 2);
+
+ asyncImportTaskExecutor.skipToStartEntityPosition(mockImportRequest, mockEntityImportStream);
+
+ verify(mockEntityImportStream, times(3)).next(); // Skipped all 3 entities
+ }
+
+ @Test
+ void testSkipToPositionNoEntitiesInStream() {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo("", 3)); // Skip to position 3
+ when(mockEntityImportStream.hasNext()).thenReturn(false); // No entities in the stream
+
+ asyncImportTaskExecutor.skipToStartEntityPosition(mockImportRequest, mockEntityImportStream);
+
+ verify(mockEntityImportStream, never()).next(); // No entities to skip
+ }
+
+ @Test
+ void testSkipToPositionSkipToEqualsCurrentPosition() {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+ EntityImportStream mockEntityImportStream = mock(EntityImportStream.class);
+
+ when(mockImportRequest.getImportTrackingInfo()).thenReturn(new AtlasAsyncImportRequest.ImportTrackingInfo("", 2)); // Skip to position 2
+ when(mockEntityImportStream.hasNext()).thenReturn(true); // At least one entity in the stream
+ when(mockEntityImportStream.getPosition()).thenReturn(2); // Already at position 2
+
+ asyncImportTaskExecutor.skipToStartEntityPosition(mockImportRequest, mockEntityImportStream);
+
+ verify(mockEntityImportStream, never()).next(); // No entities skipped since current position matches skipTo
+ }
+
+ @DataProvider(name = "registerRequestScenarios")
+ public Object[][] registerRequestScenarios() {
+ return new Object[][] {{"null", "NEW"},
+ {"SUCCESSFUL", "NEW"},
+ {"PARTIAL_SUCCESS", "NEW"},
+ {"FAILED", "NEW"},
+ {"ABORTED", "NEW"},
+ {"STAGING", "RESUMED"},
+ {"WAITING", "EXISTING"},
+ {"PROCESSING", "EXISTING"}
+ };
+ }
+
+ @Test(dataProvider = "registerRequestScenarios")
+ public void testRegisterRequest(String existingStatus, String expectedOutcome) throws AtlasBaseException {
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ AtlasAsyncImportRequest existingRequest = null;
+
+ if (!"null".equals(existingStatus)) {
+ existingRequest = mock(AtlasAsyncImportRequest.class);
+
+ when(existingRequest.getStatus()).thenReturn(AtlasAsyncImportRequest.ImportStatus.valueOf(existingStatus));
+ when(existingRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ }
+
+ when(importService.fetchImportRequestByImportId("import-id")).thenReturn(existingRequest);
+
+ AtlasAsyncImportRequest result = asyncImportTaskExecutor.registerRequest(mockResult, "import-id", 10, Collections.emptyList());
+
+ assertNotNull(result);
+
+ if ("NEW".equals(expectedOutcome)) {
+ verify(importService).saveImportRequest(any(AtlasAsyncImportRequest.class));
+ } else if ("RESUMED".equals(expectedOutcome)) {
+ verify(existingRequest).setReceivedTime(any(long.class));
+ verify(importService).updateImportRequest(existingRequest);
+ } else if ("EXISTING".equals(expectedOutcome)) {
+ verify(importService, never()).saveImportRequest(any(AtlasAsyncImportRequest.class));
+ verify(importService, never()).updateImportRequest(any(AtlasAsyncImportRequest.class));
+ }
+ }
+
+ @Test
+ public void testRegisterRequestThrowsException() throws AtlasBaseException {
+ AtlasImportResult mockResult = mock(AtlasImportResult.class);
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+
+ when(mockImportRequest.getStatus()).thenReturn(AtlasAsyncImportRequest.ImportStatus.SUCCESSFUL);
+ when(mockImportRequest.getImportDetails()).thenReturn(new AtlasAsyncImportRequest.ImportDetails());
+ when(importService.fetchImportRequestByImportId("import-id")).thenReturn(mockImportRequest);
+ doThrow(new AtlasBaseException("Some error while saving")).when(importService).saveImportRequest(any(AtlasAsyncImportRequest.class));
+
+ AtlasBaseException exception = expectThrows(AtlasBaseException.class, () -> asyncImportTaskExecutor.registerRequest(mockResult, "import-id", 10, Collections.emptyList()));
+
+ assertEquals(exception.getAtlasErrorCode(), AtlasErrorCode.IMPORT_REGISTRATION_FAILED);
+ }
+
+ @Test
+ public void testAbortAsyncImportRequest() throws AtlasBaseException {
+ AtlasAsyncImportRequest mockImportRequest = mock(AtlasAsyncImportRequest.class);
+
+ when(mockImportRequest.getTopicName()).thenReturn("ATLAS_IMPORT_12344");
+ when(importService.abortImport(any(String.class))).thenReturn(mockImportRequest);
+ doNothing().when(notificationInterface).deleteTopic(any(NotificationInterface.NotificationType.class), any(String.class));
+
+ asyncImportTaskExecutor.abortAsyncImportRequest("12344");
+
+ verify(importService, times(1)).abortImport("12344");
+ verify(notificationInterface, times(1)).deleteTopic(NotificationInterface.NotificationType.ASYNC_IMPORT, "ATLAS_IMPORT_12344");
+ }
+}
diff --git a/server-api/src/main/java/org/apache/atlas/listener/ActiveStateChangeHandler.java b/server-api/src/main/java/org/apache/atlas/listener/ActiveStateChangeHandler.java
index d49162efab..5135c25001 100644
--- a/server-api/src/main/java/org/apache/atlas/listener/ActiveStateChangeHandler.java
+++ b/server-api/src/main/java/org/apache/atlas/listener/ActiveStateChangeHandler.java
@@ -34,7 +34,8 @@ enum HandlerOrder {
DEFAULT_METADATA_SERVICE(4),
NOTIFICATION_HOOK_CONSUMER(5),
TASK_MANAGEMENT(6),
- INDEX_RECOVERY(7);
+ INDEX_RECOVERY(7),
+ IMPORT_TASK_LISTENER(8);
private final int order;
diff --git a/test-tools/src/main/resources/solr/core-template/solrconfig.xml b/test-tools/src/main/resources/solr/core-template/solrconfig.xml
index 7a0e8dd16a..c7ee388c7b 100644
--- a/test-tools/src/main/resources/solr/core-template/solrconfig.xml
+++ b/test-tools/src/main/resources/solr/core-template/solrconfig.xml
@@ -445,7 +445,7 @@
-->
edismax
- 35x_t 5j9_t 7wl_t a9x_t but_t dfp_l f0l_t i6d_l iyt_l jr9_t kjp_s lc5_t m4l_s mx1_t ohx_t xz9_i 1151_t 12px_t 14at_l 15vp_t 1891_t 19tx_t 1bet_t 1czp_t 1ekl_t 1gxx_t 1iit_l 1k3p_t 1lol_t 1o1x_t 1qf9_t 1ssl_t 1v5x_t 1wqt_t 1z45_t 4h6t_t 4ttx_s 4s91_s 4pvp_s 4oat_s 4rgl_t 4zd1_t 4vet_t 4wzp_l 4xs5_t 543p_t 52it_t 55ol_t 5csl_t 5ibp_t 5n2d_t 5kp1_t 5lhh_t 5jwl_t 5tdx_t 5slh_t 60hx_l 622t_l 5u6d_t 5zph_t 6arp_l 6ccl_l 696t_t 67lx_t 6dxh_t 63np_t 64g5_t 6611_t 6o79_t 6ozp_l 6l1h_t 6hvp_l 6jgl_l 6epx_t 6mmd_t 9e6d_t 969x_t 9fr9_t 9bt1_t 9eyt_t 9ddx_t 9p8l_t 9hc5_t 9rlx_t 9mv9_t 9og5_t 9qth_t a139_t 9t6t_t 9ypx_t a0at_t eebp_t e77p_t ebyd_t edj9_t eadh_t e5mt_l e8sl_t eb5x_t e6f9_l efwl_l ekn9_t elfp_t ef45_l egp1_t ehhh_i ejut_t eolh_t ent1_t epdx_t eqyt_i eux1_t ewhx_t f0g5_t f18l_i eyv9_t f2th_t f211_t f7k5_t f9xh_t f56t_t f8cl_t f6rp_t f3lx_l f951_t f4ed_l gyrp_t hamd_l hc79_i h539_t hczp_i hds5_i hekl_f hkw5_d hk3p_l
+ 35x_t 5j9_t 7wl_t a9x_t but_t dfp_l f0l_t i6d_l iyt_l jr9_t kjp_s lc5_t m4l_s mx1_t ohx_t xz9_i 1151_t 12px_t 14at_l 15vp_t 1891_t 19tx_t 1bet_t 1czp_t 1ekl_t 1gxx_t 1iit_l 1k3p_t 1lol_t 1o1x_t 1qf9_t 1ssl_t 1v5x_t 1wqt_t 1z45_t 20p1_t 4ttx_t 56h1_s 54w5_s 52it_s 50xx_s 543p_t 5c05_t 581x_t 59mt_l 5af9_t 5gqt_t 5f5x_t 5ibp_t 5pfp_t 5uyt_t 5zph_t 5xc5_t 5y4l_t 5wjp_t 6611_t 658l_t 6d51_l 6epx_l 66th_t 6ccl_t 6net_l 6ozp_l 6ltx_t 6k91_t 6qkl_t 6gat_t 6h39_t 6io5_t 70ud_t 71mt_l 6xol_t 6uit_l 6w3p_l 6rd1_t 6z9h_t 7e9x_t 7f2d_t 7dhh_t 7bwl_t 737p_t 7abp_t 7b45_t 7cp1_t 78qt_t a3gl_t 9vk5_t a51h_t a139_t a491_t a2o5_t aeit_t a6md_t agw5_t ac5h_t adqd_t ag3p_t aqdh_t aih1_t ao05_t apl1_t f3lx_t ewhx_t f18l_t f2th_t eznp_t eux1_l ey2t_t f0g5_t evph_l f56t_l f9xh_t fapx_t f4ed_l f5z9_t f6rp_i f951_t fdvp_t fd39_t feo5_t fg91_i fk79_t fls5_t fpqd_t fqit_i fo5h_t fs3p_t frb9_t fwud_t fz7p_t fuh1_t fxmt_t fw1x_t fsw5_l fyf9_t ftol_l ho1x_l hkw5_t hmh1_l hs05_l hssl_t hvyd_i hybp_t hudh_t j091_t jc3p_l jdol_i j6kl_t jeh1_i jf9h_i jg1x_f jmdh_d jll1_l kirp_t koat_l krgl_t kumd_l kw79_t kt1h_t kxs5_l kzd1_t
*
true
true
diff --git a/webapp/src/main/java/org/apache/atlas/notification/ImportTaskListenerImpl.java b/webapp/src/main/java/org/apache/atlas/notification/ImportTaskListenerImpl.java
new file mode 100644
index 0000000000..befa797515
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/notification/ImportTaskListenerImpl.java
@@ -0,0 +1,387 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.notification;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.AtlasException;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.ha.HAConfiguration;
+import org.apache.atlas.listener.ActiveStateChangeHandler;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus;
+import org.apache.atlas.repository.impexp.AsyncImportService;
+import org.apache.atlas.repository.store.graph.v2.asyncimport.ImportTaskListener;
+import org.apache.atlas.service.Service;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.lang.ObjectUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.annotation.DependsOn;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PreDestroy;
+import javax.inject.Inject;
+
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.atlas.AtlasConfiguration.ASYNC_IMPORT_TOPIC_PREFIX;
+import static org.apache.atlas.AtlasErrorCode.IMPORT_QUEUEING_FAILED;
+
+@Component
+@Order(8)
+@DependsOn(value = "notificationHookConsumer")
+public class ImportTaskListenerImpl implements Service, ActiveStateChangeHandler, ImportTaskListener {
+ private static final Logger LOG = LoggerFactory.getLogger(ImportTaskListenerImpl.class);
+
+ private static final String THREADNAME_PREFIX = ImportTaskListener.class.getSimpleName();
+ private static final int ASYNC_IMPORT_PERMITS = 1; // Only one asynchronous import task is permitted
+
+ private final BlockingQueue<String> requestQueue; // Blocking queue for requests
+ private final ExecutorService executorService; // Single-thread executor for sequential processing
+ private final AsyncImportService asyncImportService;
+ private final NotificationHookConsumer notificationHookConsumer;
+ private final Semaphore asyncImportSemaphore;
+ private final Configuration applicationProperties;
+
+ @Inject
+ public ImportTaskListenerImpl(AsyncImportService asyncImportService, NotificationHookConsumer notificationHookConsumer) throws AtlasException {
+ this(asyncImportService, notificationHookConsumer, new LinkedBlockingQueue<>());
+ }
+
+ public ImportTaskListenerImpl(AsyncImportService asyncImportService, NotificationHookConsumer notificationHookConsumer, BlockingQueue<String> requestQueue) throws AtlasException {
+ this.asyncImportService = asyncImportService;
+ this.notificationHookConsumer = notificationHookConsumer;
+ this.requestQueue = requestQueue;
+ this.asyncImportSemaphore = new Semaphore(ASYNC_IMPORT_PERMITS);
+ this.applicationProperties = ApplicationProperties.get();
+ this.executorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(THREADNAME_PREFIX + " thread-%d")
+ .setUncaughtExceptionHandler((thread, throwable) -> LOG.error("Uncaught exception in thread {}: {}", thread.getName(), throwable.getMessage(), throwable)).build());
+ }
+
+ @Override
+ public void start() throws AtlasException {
+ if (HAConfiguration.isHAEnabled(applicationProperties)) {
+ LOG.info("HA is enabled, not starting import consumers inline.");
+
+ return;
+ }
+
+ startInternal();
+ }
+
+ private void startInternal() {
+ CompletableFuture<Void> populateTask = CompletableFuture.runAsync(this::populateRequestQueue)
+ .exceptionally(ex -> {
+ LOG.error("Failed to populate request queue", ex);
+ return null;
+ });
+
+ CompletableFuture<Void> resumeTask = CompletableFuture.runAsync(this::resumeInProgressImports)
+ .exceptionally(ex -> {
+ LOG.error("Failed to resume in-progress imports", ex);
+ return null;
+ });
+
+ // Wait for both tasks to complete before proceeding
+ CompletableFuture.allOf(populateTask, resumeTask)
+ .thenRun(this::startNextImportInQueue)
+ .exceptionally(ex -> {
+ LOG.error("Failed to start next import in queue", ex);
+ return null;
+ }).join();
+ }
+
+ @Override
+ public void onReceiveImportRequest(AtlasAsyncImportRequest importRequest) throws AtlasBaseException {
+ try {
+ LOG.info("==> onReceiveImportRequest(atlasAsyncImportRequest={})", importRequest);
+
+ importRequest.setStatus(ImportStatus.WAITING);
+
+ asyncImportService.updateImportRequest(importRequest);
+ requestQueue.put(importRequest.getImportId());
+
+ startNextImportInQueue();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+
+ LOG.warn("Failed to add import request: {} to the queue", importRequest.getImportId());
+
+ throw new AtlasBaseException(IMPORT_QUEUEING_FAILED, e, importRequest.getImportId());
+ } finally {
+ LOG.info("<== onReceiveImportRequest(atlasAsyncImportRequest={})", importRequest);
+ }
+ }
+
+ @Override
+ public void onCompleteImportRequest(String importId) {
+ LOG.info("==> onCompleteImportRequest(importId={})", importId);
+
+ try {
+ notificationHookConsumer.closeImportConsumer(importId, ASYNC_IMPORT_TOPIC_PREFIX.getString() + importId);
+ } finally {
+ releaseAsyncImportSemaphore();
+ startNextImportInQueue();
+
+ LOG.info("<== onCompleteImportRequest(importId={})", importId);
+ }
+ }
+
+ private void startNextImportInQueue() {
+ LOG.info("==> startNextImportInQueue()");
+
+ startAsyncImportIfAvailable(null);
+
+ LOG.info("<== startNextImportInQueue()");
+ }
+
+ @VisibleForTesting
+ void startAsyncImportIfAvailable(String importId) {
+ LOG.info("==> startAsyncImportIfAvailable()");
+
+ try {
+ if (!asyncImportSemaphore.tryAcquire()) {
+ LOG.info("An async import is in progress, import request is queued");
+
+ return;
+ }
+
+ AtlasAsyncImportRequest nextImport = (importId != null) ? asyncImportService.fetchImportRequestByImportId(importId) : getNextImportFromQueue();
+
+ if (isNotValidImportRequest(nextImport)) {
+ releaseAsyncImportSemaphore();
+ return;
+ }
+
+ executorService.submit(() -> startImportConsumer(nextImport));
+ } catch (Exception e) {
+ LOG.error("Error while starting the next import, releasing the lock if held", e);
+
+ releaseAsyncImportSemaphore();
+ } finally {
+ LOG.info("<== startAsyncImportIfAvailable()");
+ }
+ }
+
+ private void startImportConsumer(AtlasAsyncImportRequest importRequest) {
+ try {
+ LOG.info("==> startImportConsumer(atlasAsyncImportRequest={})", importRequest);
+
+ notificationHookConsumer.startAsyncImportConsumer(NotificationInterface.NotificationType.ASYNC_IMPORT, importRequest.getImportId(), importRequest.getTopicName());
+
+ importRequest.setStatus(ImportStatus.PROCESSING);
+ importRequest.setProcessingStartTime(System.currentTimeMillis());
+ } catch (Exception e) {
+ LOG.error("Failed to start consumer for import: {}, marking import as failed", importRequest, e);
+
+ importRequest.setStatus(ImportStatus.FAILED);
+ } finally {
+ asyncImportService.updateImportRequest(importRequest);
+
+ if (ObjectUtils.equals(importRequest.getStatus(), ImportStatus.FAILED)) {
+ onCompleteImportRequest(importRequest.getImportId());
+ }
+
+ LOG.info("<== startImportConsumer(atlasAsyncImportRequest={})", importRequest);
+ }
+ }
+
+ @VisibleForTesting
+ AtlasAsyncImportRequest getNextImportFromQueue() {
+ LOG.info("==> getNextImportFromQueue()");
+
+ final int maxRetries = 5;
+ int retryCount = 0;
+ AtlasAsyncImportRequest nextImport = null;
+
+ while (retryCount < maxRetries) {
+ try {
+ String importId = requestQueue.poll(10, TimeUnit.SECONDS);
+
+ if (importId == null) {
+ retryCount++;
+
+ LOG.warn("Still waiting for import request... (attempt {} of {})", retryCount, maxRetries);
+
+ continue;
+ }
+
+ // Reset retry count because we got a valid importId (even if it's invalid later)
+ retryCount = 0;
+
+ AtlasAsyncImportRequest importRequest = asyncImportService.fetchImportRequestByImportId(importId);
+
+ if (isNotValidImportRequest(importRequest)) {
+ LOG.info("Import request {}, is not in a valid status to start import, hence skipping..", importRequest);
+
+ continue;
+ }
+
+ LOG.info("<== getImportIdFromQueue(nextImportId={})", importRequest.getImportId());
+
+ return importRequest;
+ } catch (InterruptedException e) {
+ LOG.error("Thread interrupted while waiting for importId from the queue", e);
+ // Restore the interrupt flag
+ Thread.currentThread().interrupt();
+ return null;
+ }
+ }
+
+ LOG.error("Exceeded max retry attempts. Exiting...");
+
+ return null;
+ }
+
+ @VisibleForTesting
+ boolean isNotValidImportRequest(AtlasAsyncImportRequest importRequest) {
+ return importRequest == null ||
+ (!ImportStatus.WAITING.equals(importRequest.getStatus()) && !ImportStatus.PROCESSING.equals(importRequest.getStatus()));
+ }
+
+ private void releaseAsyncImportSemaphore() {
+ LOG.info("==> releaseAsyncImportSemaphore()");
+
+ if (asyncImportSemaphore.availablePermits() == 0) {
+ asyncImportSemaphore.release();
+
+ LOG.info("<== releaseAsyncImportSemaphore()");
+ } else {
+ LOG.info("<== releaseAsyncImportSemaphore(); no lock held");
+ }
+ }
+
+ void populateRequestQueue() {
+ LOG.info("==> populateRequestQueue()");
+
+ List<String> importRequests = asyncImportService.fetchQueuedImportRequests();
+
+ try {
+ if (!importRequests.isEmpty()) {
+ for (String request : importRequests) {
+ try {
+ if (!requestQueue.offer(request, 5, TimeUnit.SECONDS)) { // Wait up to 5 sec
+ LOG.warn("populateRequestQueue(): Request {} could not be added to the queue", request);
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+
+ LOG.error("populateRequestQueue(): Failed to add requests to queue");
+
+ break; // Exit loop on interruption
+ }
+ }
+
+ LOG.info("populateRequestQueue(): Added {} requests to queue", importRequests.size());
+ } else {
+ LOG.warn("populateRequestQueue(): No queued requests found.");
+ }
+ } finally {
+ LOG.info("<== populateRequestQueue()");
+ }
+ }
+
+ private void resumeInProgressImports() {
+ LOG.info("==> resumeInProgressImports()");
+
+ try {
+ String importId = asyncImportService.fetchInProgressImportIds().stream().findFirst().orElse(null);
+
+ if (importId == null) {
+ LOG.warn("No imports found to resume");
+
+ return;
+ }
+
+ LOG.info("Resuming import id={}", importId);
+
+ startAsyncImportIfAvailable(importId);
+ } finally {
+ LOG.info("<== resumeInProgressImports()");
+ }
+ }
+
+ @PreDestroy
+ public void stopImport() {
+ LOG.info("Shutting down import processor...");
+
+ executorService.shutdown(); // Initiate an orderly shutdown
+
+ try {
+ if (!executorService.awaitTermination(30, TimeUnit.SECONDS)) {
+ LOG.warn("Executor service did not terminate gracefully within the timeout. Waiting longer...");
+
+ // Retry shutdown before forcing it
+ if (!executorService.awaitTermination(10, TimeUnit.SECONDS)) {
+ LOG.warn("Forcing shutdown...");
+
+ executorService.shutdownNow();
+ }
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+
+ LOG.error("Shutdown interrupted. Forcing shutdown...");
+
+ executorService.shutdownNow();
+ }
+
+ LOG.info("Import processor stopped.");
+ }
+
+ @Override
+ public void stop() throws AtlasException {
+ try {
+ stopImport();
+ } finally {
+ releaseAsyncImportSemaphore();
+ }
+ }
+
+ @Override
+ public void instanceIsActive() {
+ LOG.info("Reacting to active state: initializing Kafka consumers");
+
+ startInternal();
+ }
+
+ @Override
+ public void instanceIsPassive() {
+ try {
+ stopImport();
+ } finally {
+ releaseAsyncImportSemaphore();
+ }
+ }
+
+ @Override
+ public int getHandlerOrder() {
+ return ActiveStateChangeHandler.HandlerOrder.IMPORT_TASK_LISTENER.getOrder();
+ }
+}
diff --git a/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java b/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
index 49d378f855..001e575799 100644
--- a/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
+++ b/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
@@ -43,12 +43,16 @@
import org.apache.atlas.model.notification.HookNotification.EntityDeleteRequestV2;
import org.apache.atlas.model.notification.HookNotification.EntityPartialUpdateRequestV2;
import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2;
+import org.apache.atlas.model.notification.ImportNotification.AtlasEntityImportNotification;
+import org.apache.atlas.model.notification.ImportNotification.AtlasTypesDefImportNotification;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.notification.NotificationInterface.NotificationType;
import org.apache.atlas.notification.preprocessor.EntityPreprocessor;
import org.apache.atlas.notification.preprocessor.GenericEntityPreprocessor;
import org.apache.atlas.notification.preprocessor.PreprocessorContext;
import org.apache.atlas.notification.preprocessor.PreprocessorContext.PreprocessAction;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
+import org.apache.atlas.repository.impexp.AsyncImporter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.EntityCorrelationStore;
import org.apache.atlas.repository.store.graph.v2.AtlasEntityStream;
@@ -80,6 +84,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.DependsOn;
+import org.springframework.context.annotation.Lazy;
import org.springframework.core.annotation.Order;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
@@ -100,11 +105,13 @@
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
+import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
@@ -170,9 +177,9 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
private static final int KAFKA_CONSUMER_SHUTDOWN_WAIT = 30000;
private static final String ATLAS_HOOK_CONSUMER_THREAD_NAME = "atlas-hook-consumer-thread";
private static final String ATLAS_HOOK_UNSORTED_CONSUMER_THREAD_NAME = "atlas-hook-unsorted-consumer-thread";
+ private static final String ATLAS_IMPORT_CONSUMER_THREAD_PREFIX = "atlas-import-consumer-thread-";
private static final String THREADNAME_PREFIX = NotificationHookConsumer.class.getSimpleName();
- @VisibleForTesting final int consumerRetryInterval;
private final AtlasEntityStore atlasEntityStore;
private final ServiceState serviceState;
private final AtlasInstanceConverter instanceConverter;
@@ -203,21 +210,25 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
private final boolean createShellEntityForNonExistingReference;
private final boolean authorizeUsingMessageUser;
private final Map authnCache;
- private final NotificationInterface notificationInterface;
- private final Configuration applicationProperties;
- private final Map lastCommittedPartitionOffset;
- private final EntityCorrelationManager entityCorrelationManager;
- private final long consumerMsgBufferingIntervalMS;
- private final int consumerMsgBufferingBatchSize;
-
- @VisibleForTesting
- List<HookConsumer> consumers;
+ private final NotificationInterface notificationInterface;
+ private final Configuration applicationProperties;
+ private final Map lastCommittedPartitionOffset;
+ private final EntityCorrelationManager entityCorrelationManager;
+ private final long consumerMsgBufferingIntervalMS;
+ private final int consumerMsgBufferingBatchSize;
+ private final AsyncImporter asyncImporter;
private ExecutorService executors;
private Instant nextStatsLogTime = AtlasMetricsCounter.getNextHourStartTime(Instant.now());
+ @VisibleForTesting
+ final int consumerRetryInterval;
+
+ @VisibleForTesting
+ List<HookConsumer> consumers;
+
@Inject
- public NotificationHookConsumer(NotificationInterface notificationInterface, AtlasEntityStore atlasEntityStore, ServiceState serviceState, AtlasInstanceConverter instanceConverter, AtlasTypeRegistry typeRegistry, AtlasMetricsUtil metricsUtil, EntityCorrelationStore entityCorrelationStore) throws AtlasException {
+ public NotificationHookConsumer(NotificationInterface notificationInterface, AtlasEntityStore atlasEntityStore, ServiceState serviceState, AtlasInstanceConverter instanceConverter, AtlasTypeRegistry typeRegistry, AtlasMetricsUtil metricsUtil, EntityCorrelationStore entityCorrelationStore, @Lazy AsyncImporter asyncImporter) throws AtlasException {
this.notificationInterface = notificationInterface;
this.atlasEntityStore = atlasEntityStore;
this.serviceState = serviceState;
@@ -226,6 +237,7 @@ public NotificationHookConsumer(NotificationInterface notificationInterface, Atl
this.applicationProperties = ApplicationProperties.get();
this.metricsUtil = metricsUtil;
this.lastCommittedPartitionOffset = new HashMap<>();
+ this.asyncImporter = asyncImporter;
maxRetries = applicationProperties.getInt(CONSUMER_RETRIES_PROPERTY, 3);
failedMsgCacheSize = applicationProperties.getInt(CONSUMER_FAILEDCACHESIZE_PROPERTY, 1);
@@ -370,12 +382,6 @@ public NotificationHookConsumer(NotificationInterface notificationInterface, Atl
@Override
public void start() throws AtlasException {
- if (consumerDisabled) {
- LOG.info("No hook messages will be processed. {} = {}", CONSUMER_DISABLED, consumerDisabled);
-
- return;
- }
-
startInternal(applicationProperties, null);
}
@@ -383,7 +389,7 @@ public void start() throws AtlasException {
public void stop() {
//Allow for completion of outstanding work
try {
- if (consumerDisabled) {
+ if (consumerDisabled && consumers.isEmpty()) {
return;
}
@@ -412,13 +418,18 @@ public void stop() {
*/
@Override
public void instanceIsActive() {
+ if (executors == null) {
+ executors = createExecutor();
+ LOG.info("Executors initialized (Instance is active)");
+ }
+
if (consumerDisabled) {
return;
}
LOG.info("Reacting to active state: initializing Kafka consumers");
- startConsumers(executors);
+ startHookConsumers();
}
/**
@@ -429,7 +440,7 @@ public void instanceIsActive() {
*/
@Override
public void instanceIsPassive() {
- if (consumerDisabled) {
+ if (consumerDisabled && consumers.isEmpty()) {
return;
}
@@ -443,6 +454,34 @@ public int getHandlerOrder() {
return HandlerOrder.NOTIFICATION_HOOK_CONSUMER.getOrder();
}
+ public void closeImportConsumer(String importId, String topic) {
+ try {
+ LOG.info("==> closeImportConsumer(importId={}, topic={})", importId, topic);
+            // consumerName example: "ATLAS_IMPORT_<importId>" e.g. ATLAS_IMPORT_e22a73f9f6a16620a8655b36d71fb5be
+ String consumerName = ATLAS_IMPORT_CONSUMER_THREAD_PREFIX + importId;
+ ListIterator consumersIterator = consumers.listIterator();
+
+ while (consumersIterator.hasNext()) {
+ HookConsumer consumer = consumersIterator.next();
+
+ if (consumer.getName().startsWith(consumerName)) {
+ consumer.shutdown();
+ consumersIterator.remove();
+ }
+ }
+
+ notificationInterface.closeConsumer(NotificationInterface.NotificationType.ASYNC_IMPORT, topic);
+ notificationInterface.deleteTopic(NotificationInterface.NotificationType.ASYNC_IMPORT, topic);
+
+ lastCommittedPartitionOffset.entrySet().removeIf(entry -> topic.equals(entry.getKey().topic()));
+ } catch (Exception e) {
+ LOG.error("Could not cleanup consumers for importId: {}", importId, e);
+ } finally {
+ LOG.info("<== closeImportConsumer(importId={}, topic={})", importId, topic);
+ }
+ }
+
+ @VisibleForTesting
void startInternal(Configuration configuration, ExecutorService executorService) {
if (consumers == null) {
consumers = new ArrayList<>();
@@ -453,16 +492,25 @@ void startInternal(Configuration configuration, ExecutorService executorService)
}
if (!HAConfiguration.isHAEnabled(configuration)) {
+ if (executors == null) {
+ executors = createExecutor();
+ LOG.info("Executors initialized (HA is disabled)");
+ }
+ if (consumerDisabled) {
+ LOG.info("No hook messages will be processed. {} = {}", CONSUMER_DISABLED, consumerDisabled);
+ return;
+ }
+
LOG.info("HA is disabled, starting consumers inline.");
- startConsumers(executorService);
+ startHookConsumers();
}
}
- private void startConsumers(ExecutorService executorService) {
- int numThreads = applicationProperties.getInt(CONSUMER_THREADS_PROPERTY, 1);
+ @VisibleForTesting
+ void startHookConsumers() {
+ int numThreads = applicationProperties.getInt(CONSUMER_THREADS_PROPERTY, 1);
Map, NotificationType> notificationConsumersByType = new HashMap<>();
-
List> notificationConsumers = notificationInterface.createConsumers(NotificationType.HOOK, numThreads);
for (NotificationConsumer notificationConsumer : notificationConsumers) {
@@ -477,11 +525,7 @@ private void startConsumers(ExecutorService executorService) {
}
}
- if (executorService == null) {
- executorService = Executors.newFixedThreadPool(notificationConsumersByType.size(), new ThreadFactoryBuilder().setNameFormat(THREADNAME_PREFIX + " thread-%d").build());
- }
-
- executors = executorService;
+ List hookConsumers = new ArrayList<>();
for (final NotificationConsumer consumer : notificationConsumersByType.keySet()) {
String hookConsumerName = ATLAS_HOOK_CONSUMER_THREAD_NAME;
@@ -492,8 +536,52 @@ private void startConsumers(ExecutorService executorService) {
HookConsumer hookConsumer = new HookConsumer(hookConsumerName, consumer);
- consumers.add(hookConsumer);
- executors.submit(hookConsumer);
+ hookConsumers.add(hookConsumer);
+ }
+
+ startConsumers(hookConsumers);
+ }
+
+ public void startAsyncImportConsumer(NotificationType notificationType, String importId, String topic) throws AtlasBaseException {
+ if (topic != null) {
+ notificationInterface.addTopicToNotificationType(notificationType, topic);
+ }
+
+ List> notificationConsumers = notificationInterface.createConsumers(notificationType, 1);
+ List hookConsumers = new ArrayList<>();
+
+ for (final NotificationConsumer consumer : notificationConsumers) {
+ String hookConsumerName = ATLAS_IMPORT_CONSUMER_THREAD_PREFIX + importId;
+ HookConsumer hookConsumer = new HookConsumer(hookConsumerName, consumer);
+
+ hookConsumers.add(hookConsumer);
+ }
+
+ startConsumers(hookConsumers);
+ }
+
+ @VisibleForTesting
+ protected ExecutorService createExecutor() {
+ return new ThreadPoolExecutor(
+ 0, // Core pool size
+ Integer.MAX_VALUE, // Maximum pool size (dynamic scaling)
+ 60L, TimeUnit.SECONDS, // Idle thread timeout
+ new SynchronousQueue<>(), // Direct handoff queue
+ new ThreadFactoryBuilder().setNameFormat(THREADNAME_PREFIX + " thread-%d").build());
+ }
+
+ private void startConsumers(List hookConsumers) {
+ if (consumers == null) {
+ consumers = new ArrayList<>();
+ }
+
+ if (executors == null) {
+ throw new IllegalStateException("Executors must be initialized before starting consumers.");
+ }
+
+ for (final HookConsumer consumer : hookConsumers) {
+ consumers.add(consumer);
+ executors.submit(consumer);
}
}
@@ -501,10 +589,12 @@ private void stopConsumerThreads() {
LOG.info("==> stopConsumerThreads()");
if (consumers != null) {
- for (HookConsumer consumer : consumers) {
+ Iterator iterator = consumers.iterator();
+ while (iterator.hasNext()) {
+ HookConsumer consumer = iterator.next();
consumer.shutdown();
+                iterator.remove(); // remove via iterator to avoid ConcurrentModificationException while shutting down
}
-
consumers.clear();
}
@@ -533,8 +623,7 @@ private List trimAndPurge(String[] values, String defaultValue) {
private void preprocessEntities(PreprocessorContext context) {
GenericEntityPreprocessor genericEntityPreprocessor = new GenericEntityPreprocessor(this.entityTypesToIgnore, this.entitiesToIgnore);
-
- List entities = context.getEntities();
+ List entities = context.getEntities();
if (entities != null) {
for (int i = 0; i < entities.size(); i++) {
@@ -1131,12 +1220,13 @@ void sortAndPublishMsgsToAtlasHook(long msgBufferingStartTime, Map kafkaMsg) {
- AtlasPerfTracer perf = null;
- HookNotification message = kafkaMsg.getMessage();
- String messageUser = message.getUser();
- long startTime = System.currentTimeMillis();
- NotificationStat stats = new NotificationStat();
- AuditLog auditLog = null;
+ AtlasPerfTracer perf = null;
+ HookNotification message = kafkaMsg.getMessage();
+ String messageUser = message.getUser();
+ long startTime = System.currentTimeMillis();
+ NotificationStat stats = new NotificationStat();
+ AuditLog auditLog = null;
+ boolean importRequestComplete = false;
if (authorizeUsingMessageUser) {
setCurrentUser(messageUser);
@@ -1185,9 +1275,7 @@ void handleMessage(AtlasKafkaMessage kafkaMsg) {
// Used for intermediate conversions during create and update
String exceptionClassName = StringUtils.EMPTY;
for (int numRetries = 0; numRetries < maxRetries; numRetries++) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("handleMessage({}): attempt {}", message.getType().name(), numRetries);
- }
+ LOG.debug("handleMessage({}): attempt {}", message.getType().name(), numRetries);
try {
RequestContext requestContext = RequestContext.get();
@@ -1333,6 +1421,40 @@ void handleMessage(AtlasKafkaMessage kafkaMsg) {
}
break;
+ case IMPORT_TYPES_DEF: {
+ final AtlasTypesDefImportNotification typesDefImportNotification = (AtlasTypesDefImportNotification) message;
+ final String importId = typesDefImportNotification.getImportId();
+ final AtlasTypesDef typesDef = typesDefImportNotification.getTypesDef();
+
+ try {
+ asyncImporter.onImportTypeDef(typesDef, importId);
+ } catch (AtlasBaseException abe) {
+ LOG.error("IMPORT_TYPE_DEF: {} failed to import type definition: {}", importId, typesDef);
+ asyncImporter.onImportComplete(importId);
+ importRequestComplete = true;
+ }
+ }
+ break;
+
+ case IMPORT_ENTITY: {
+ final AtlasEntityImportNotification entityImportNotification = (AtlasEntityImportNotification) message;
+ final String importId = entityImportNotification.getImportId();
+ final AtlasEntityWithExtInfo entityWithExtInfo = entityImportNotification.getEntity();
+ final int position = entityImportNotification.getPosition();
+ boolean completeImport = false;
+
+ try {
+ importRequestComplete = asyncImporter.onImportEntity(entityWithExtInfo, importId, position);
+ } catch (AtlasBaseException abe) {
+ importRequestComplete = true;
+
+ asyncImporter.onImportComplete(importId);
+
+ LOG.error("IMPORT_ENTITY: {} failed to import entity: {}", importId, entityImportNotification);
+ }
+ }
+ break;
+
default:
throw new IllegalStateException("Unknown notification type: " + message.getType().name());
}
@@ -1345,6 +1467,7 @@ void handleMessage(AtlasKafkaMessage kafkaMsg) {
break;
} catch (Throwable e) {
RequestContext.get().resetEntityGuidUpdates();
+
exceptionClassName = e.getClass().getSimpleName();
// don't retry in following conditions:
@@ -1432,6 +1555,10 @@ void handleMessage(AtlasKafkaMessage kafkaMsg) {
nextStatsLogTime = AtlasMetricsCounter.getNextHourStartTime(now);
}
+
+ if (importRequestComplete) {
+ asyncImporter.onCompleteImportRequest(((AtlasEntityImportNotification) message).getImportId());
+ }
}
}
@@ -1501,8 +1628,7 @@ private void createOrUpdate(AtlasEntitiesWithExtInfo entities, boolean isPartial
AtlasEntitiesWithExtInfo batch = new AtlasEntitiesWithExtInfo(entitiesBatch);
AtlasEntityStream batchStream = new AtlasEntityStream(batch, entityStream);
-
- EntityMutationResponse response = atlasEntityStore.createOrUpdate(batchStream, isPartialUpdate);
+ EntityMutationResponse response = atlasEntityStore.createOrUpdate(batchStream, isPartialUpdate);
recordProcessedEntities(response, stats, context);
diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java
index 28a3b15061..04acbfc8f9 100644
--- a/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java
+++ b/webapp/src/main/java/org/apache/atlas/web/filters/ActiveServerFilter.java
@@ -55,7 +55,7 @@ public class ActiveServerFilter implements Filter {
private final String[] adminUriNotFiltered = {"/admin/export", "/admin/import", "/admin/importfile", "/admin/audits",
"/admin/purge", "/admin/expimp/audit", "/admin/metrics", "/admin/server", "/admin/audit/", "admin/tasks",
- "/admin/debug/metrics", "/admin/audits/ageout"};
+ "/admin/debug/metrics", "/admin/audits/ageout", "admin/async/import", "admin/async/import/status"};
private final ActiveInstanceState activeInstanceState;
private final ServiceState serviceState;
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java
index 0e08542712..f3128b5ab1 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java
@@ -30,12 +30,15 @@
import org.apache.atlas.authorize.AtlasPrivilege;
import org.apache.atlas.discovery.SearchContext;
import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.PList;
import org.apache.atlas.model.audit.AtlasAuditEntry;
import org.apache.atlas.model.audit.AtlasAuditEntry.AuditOperation;
import org.apache.atlas.model.audit.AuditReductionCriteria;
import org.apache.atlas.model.audit.AuditSearchParameters;
import org.apache.atlas.model.audit.EntityAuditEventV2;
import org.apache.atlas.model.audit.EntityAuditEventV2.EntityAuditActionV2;
+import org.apache.atlas.model.impexp.AsyncImportStatus;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
import org.apache.atlas.model.impexp.AtlasExportRequest;
import org.apache.atlas.model.impexp.AtlasExportResult;
import org.apache.atlas.model.impexp.AtlasImportRequest;
@@ -679,6 +682,96 @@ public AtlasImportResult importData(@DefaultValue("{}") @FormDataParam("request"
return result;
}
+ @POST
+ @Path("/async/import")
+ @Produces(Servlets.JSON_MEDIA_TYPE)
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public AtlasAsyncImportRequest importAsync(@DefaultValue("{}") @FormDataParam("request") String jsonData, @FormDataParam("data") InputStream inputStream) throws AtlasBaseException {
+ LOG.debug("==> AdminResource.importAsync(jsonData={}, inputStream={})", jsonData, (inputStream != null));
+
+ AtlasAuthorizationUtils.verifyAccess(new AtlasAdminAccessRequest(AtlasPrivilege.ADMIN_IMPORT), "asyncImportData");
+
+ AtlasAsyncImportRequest asyncImportRequest;
+ boolean releaseExportImportLockOnCompletion = false;
+
+ try {
+ AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+ boolean preventMultipleRequests = request != null && request.getOptions() != null && !request.getOptions().containsKey(AtlasImportRequest.OPTION_KEY_REPLICATED_FROM);
+
+ if (preventMultipleRequests) {
+ acquireExportImportLock("import");
+
+ releaseExportImportLockOnCompletion = true;
+ }
+
+ asyncImportRequest = importService.run(request, inputStream, Servlets.getUserName(httpServletRequest), Servlets.getHostName(httpServletRequest), AtlasAuthorizationUtils.getRequestIpAddress(httpServletRequest));
+ } catch (AtlasBaseException excp) {
+ if (excp.getAtlasErrorCode().equals(AtlasErrorCode.IMPORT_ATTEMPTING_EMPTY_ZIP)) {
+ LOG.info(excp.getMessage());
+
+ return new AtlasAsyncImportRequest();
+ } else {
+ LOG.error("importAsync(binary) failed", excp);
+
+ throw excp;
+ }
+ } catch (Exception excp) {
+ LOG.error("importAsync(binary) failed", excp);
+
+ throw new AtlasBaseException(excp);
+ } finally {
+ if (releaseExportImportLockOnCompletion) {
+ releaseExportImportLock();
+ }
+
+ LOG.debug("<== AdminResource.importAsync(binary)");
+ }
+
+ return asyncImportRequest;
+ }
+
+ @DELETE
+ @Path("/async/import/{importId}")
+ @Produces(Servlets.JSON_MEDIA_TYPE)
+ @Consumes(MediaType.APPLICATION_JSON)
+ public void abortAsyncImport(@PathParam("importId") String importId) throws AtlasBaseException {
+ importService.abortAsyncImport(importId);
+ }
+
+ @GET
+ @Path("/async/import/status")
+ @Produces(Servlets.JSON_MEDIA_TYPE)
+ public PList getAsyncImportStatus(@QueryParam("offset") @DefaultValue("0") int offset, @QueryParam("limit") @DefaultValue("50") int limit) throws AtlasBaseException {
+ AtlasPerfTracer perf = null;
+
+ try {
+ if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+ perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "AdminResource.getAsyncImportStatus()");
+ }
+
+ return importService.getAsyncImportsStatus(offset, limit);
+ } finally {
+ AtlasPerfTracer.log(perf);
+ }
+ }
+
+ @GET
+ @Path("/async/import/status/{importId}")
+ @Produces(Servlets.JSON_MEDIA_TYPE)
+ public AtlasAsyncImportRequest getAsyncImportStatusById(@PathParam("importId") String importId) throws AtlasBaseException {
+ AtlasPerfTracer perf = null;
+
+ try {
+ if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
+ perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "AdminResource.getAsyncImportStatusById(importId=" + importId + ")");
+ }
+
+ return importService.getAsyncImportRequest(importId);
+ } finally {
+ AtlasPerfTracer.log(perf);
+ }
+ }
+
@PUT
@Path("/purge")
@Consumes(Servlets.JSON_MEDIA_TYPE)
diff --git a/webapp/src/test/java/org/apache/atlas/notification/ImportTaskListenerImplTest.java b/webapp/src/test/java/org/apache/atlas/notification/ImportTaskListenerImplTest.java
new file mode 100644
index 0000000000..da89c90fda
--- /dev/null
+++ b/webapp/src/test/java/org/apache/atlas/notification/ImportTaskListenerImplTest.java
@@ -0,0 +1,534 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.notification;
+
+import org.apache.atlas.AtlasException;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasAsyncImportRequest;
+import org.apache.atlas.repository.impexp.AsyncImportService;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Ignore;
+import org.testng.annotations.Test;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.ABORTED;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.FAILED;
+import static org.apache.atlas.model.impexp.AtlasAsyncImportRequest.ImportStatus.WAITING;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.fail;
+
+public class ImportTaskListenerImplTest {
+ private static final String VALID_IMPORT_ID = "valid-id";
+ private static final String INVALID_IMPORT_ID = "invalid-id";
+
+ @Mock
+ private AsyncImportService asyncImportService;
+
+ @Mock
+ private NotificationHookConsumer notificationHookConsumer;
+
+ @Mock
+ private BlockingDeque requestQueue;
+
+ @InjectMocks
+ private ImportTaskListenerImpl importTaskListener;
+
+ private AtlasAsyncImportRequest importRequest;
+
+ @BeforeTest
+ public void setup() throws Exception {
+ MockitoAnnotations.openMocks(this);
+
+ importRequest = mock(AtlasAsyncImportRequest.class);
+
+ when(importRequest.getImportId()).thenReturn("import123");
+ when(importRequest.getTopicName()).thenReturn("topic1");
+
+ requestQueue = mock(BlockingDeque.class);
+ asyncImportService = mock(AsyncImportService.class);
+
+ when(asyncImportService.fetchImportRequestByImportId("import123")).thenReturn(importRequest);
+
+ notificationHookConsumer = mock(NotificationHookConsumer.class);
+ importTaskListener = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+ }
+
+ @BeforeMethod
+ public void resetMocks() throws AtlasException {
+ MockitoAnnotations.openMocks(this);
+
+ when(importRequest.getImportId()).thenReturn("import123");
+ when(importRequest.getTopicName()).thenReturn("topic1");
+ when(asyncImportService.fetchImportRequestByImportId(any(String.class))).thenReturn(importRequest);
+
+ importTaskListener = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+ }
+
+ @AfterMethod
+ public void teardown() {
+ Mockito.reset(asyncImportService, notificationHookConsumer, requestQueue, importRequest);
+ }
+
+ @Test
+ public void testOnReceiveImportRequestAddsRequestToQueue() throws InterruptedException, AtlasBaseException {
+ importTaskListener.onReceiveImportRequest(importRequest);
+
+ Thread.sleep(500);
+
+ verify(requestQueue, times(1)).put("import123");
+ verify(asyncImportService, times(1)).updateImportRequest(importRequest);
+ }
+
+ @Test
+ @Ignore
+ public void testOnReceiveImportRequestTriggersStartNextImport() throws Exception {
+ doNothing().when(requestQueue).put("import123");
+ when(requestQueue.poll(10, TimeUnit.SECONDS)).thenReturn("import123");
+
+ importTaskListener.onReceiveImportRequest(importRequest);
+
+ Thread.sleep(500);
+
+ verify(asyncImportService, atLeastOnce()).fetchImportRequestByImportId("import123");
+ }
+
+ @Test(expectedExceptions = AtlasBaseException.class)
+ public void testOnReceiveImportRequestHandlesQueueException() throws InterruptedException, AtlasBaseException {
+ doThrow(new InterruptedException()).when(requestQueue).put(any(String.class));
+
+ try {
+ importTaskListener.onReceiveImportRequest(importRequest);
+ } finally {
+ verify(requestQueue, times(1)).put("import123");
+ verify(asyncImportService, times(1)).updateImportRequest(importRequest);
+ }
+ }
+
+ @Test
+ public void testOnCompleteImportRequest() {
+ importTaskListener.onCompleteImportRequest("import123");
+
+ verify(notificationHookConsumer, times(1))
+ .closeImportConsumer("import123", "ATLAS_IMPORT_import123");
+ }
+
+ @Test
+ public void testPopulateRequestQueueFillsQueueWithRequests() throws InterruptedException {
+ List imports = new ArrayList<>();
+
+ imports.add("import1");
+ imports.add("import2");
+ imports.add("import3");
+
+ when(asyncImportService.fetchQueuedImportRequests()).thenReturn(imports);
+
+ importTaskListener.populateRequestQueue();
+
+ verify(requestQueue, times(1)).offer("import1", 5, TimeUnit.SECONDS);
+ verify(requestQueue, times(1)).offer("import2", 5, TimeUnit.SECONDS);
+ verify(requestQueue, times(1)).offer("import3", 5, TimeUnit.SECONDS);
+ verify(asyncImportService, times(1)).fetchQueuedImportRequests();
+ }
+
+ @Test
+ public void testPopulateRequestQueueHandlesInterruptedException() throws InterruptedException {
+ List imports = new ArrayList<>();
+
+ imports.add("import1");
+
+ when(asyncImportService.fetchQueuedImportRequests()).thenReturn(imports);
+
+ try {
+ doThrow(new InterruptedException()).when(requestQueue)
+ .offer(any(String.class), eq(5L), eq(TimeUnit.SECONDS));
+ } catch (InterruptedException e) {
+ // ignored
+ }
+
+ importTaskListener.populateRequestQueue();
+
+ verify(requestQueue, times(1)).offer("import1", 5, TimeUnit.SECONDS);
+ }
+
+ @Test
+ public void testStopImport_GracefulShutdown() throws Exception {
+ ExecutorService mockExecutorService = mock(ExecutorService.class);
+
+ when(mockExecutorService.awaitTermination(30, TimeUnit.SECONDS)).thenReturn(true);
+
+ Field executorServiceField = ImportTaskListenerImpl.class.getDeclaredField("executorService");
+
+ executorServiceField.setAccessible(true);
+ executorServiceField.set(importTaskListener, mockExecutorService);
+
+ importTaskListener.stop();
+
+ verify(mockExecutorService, times(1)).shutdown();
+ verify(mockExecutorService, times(1)).awaitTermination(30, TimeUnit.SECONDS);
+ verify(mockExecutorService, never()).shutdownNow();
+ }
+
+ @Test
+ public void testStopImport_ForcedShutdown() throws Exception {
+ ExecutorService mockExecutorService = mock(ExecutorService.class);
+
+ when(mockExecutorService.awaitTermination(30, TimeUnit.SECONDS)).thenReturn(false);
+ when(mockExecutorService.awaitTermination(10, TimeUnit.SECONDS)).thenReturn(false);
+
+ Field executorServiceField = ImportTaskListenerImpl.class.getDeclaredField("executorService");
+
+ executorServiceField.setAccessible(true);
+ executorServiceField.set(importTaskListener, mockExecutorService);
+
+ importTaskListener.stop();
+
+ verify(mockExecutorService, times(1)).shutdown();
+ verify(mockExecutorService, times(1)).awaitTermination(30, TimeUnit.SECONDS);
+ verify(mockExecutorService, times(1)).shutdownNow();
+ }
+
+ @Test
+ public void testInstanceIsActive() {
+ importTaskListener.instanceIsActive();
+
+ verify(asyncImportService, atLeast(0)).fetchQueuedImportRequests();
+ verify(asyncImportService, atLeast(0)).fetchInProgressImportIds();
+ }
+
+ @Test
+ public void testInstanceIsPassive() throws InterruptedException, NoSuchFieldException, IllegalAccessException {
+ ExecutorService mockExecutorService = mock(ExecutorService.class);
+
+ when(mockExecutorService.awaitTermination(anyLong(), any(TimeUnit.class))).thenReturn(true);
+
+ Field executorServiceField = ImportTaskListenerImpl.class.getDeclaredField("executorService");
+
+ executorServiceField.setAccessible(true);
+ executorServiceField.set(importTaskListener, mockExecutorService);
+
+ importTaskListener.instanceIsPassive();
+
+ verify(mockExecutorService, times(1)).shutdown();
+
+ Field semaphoreField = ImportTaskListenerImpl.class.getDeclaredField("asyncImportSemaphore");
+
+ semaphoreField.setAccessible(true);
+
+ Semaphore semaphore = (Semaphore) semaphoreField.get(importTaskListener);
+
+ assertEquals(semaphore.availablePermits(), 1);
+ }
+
+ @Test
+ public void testGetHandlerOrder() {
+ int order = importTaskListener.getHandlerOrder();
+
+ assertEquals(order, 8);
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_WithInvalidStatus() throws Exception {
+ when(importRequest.getStatus()).thenReturn(FAILED);
+ when(requestQueue.poll(anyLong(), any(TimeUnit.class))).thenReturn("import123").thenReturn(null);
+
+ importTaskListener.onReceiveImportRequest(importRequest);
+
+ verify(notificationHookConsumer, never()).startAsyncImportConsumer(any(), anyString(), anyString());
+ }
+
+ @Test
+ public void testStartImportConsumer_Successful() throws Exception {
+ when(importRequest.getStatus()).thenReturn(WAITING);
+ when(importRequest.getTopicName()).thenReturn("topic1");
+
+ ExecutorService realExecutor = java.util.concurrent.Executors.newSingleThreadExecutor();
+ Field executorField = ImportTaskListenerImpl.class.getDeclaredField("executorService");
+
+ executorField.setAccessible(true);
+ executorField.set(importTaskListener, realExecutor);
+ when(requestQueue.poll(anyLong(), any(TimeUnit.class))).thenReturn("import123");
+
+ importTaskListener.onReceiveImportRequest(importRequest);
+
+ Thread.sleep(500);
+
+ verify(notificationHookConsumer, atLeastOnce())
+ .startAsyncImportConsumer(NotificationInterface.NotificationType.ASYNC_IMPORT, "import123", "topic1");
+
+ realExecutor.shutdownNow();
+ }
+
+ @Test
+ public void testStartImportConsumer_Failure() throws Exception {
+ when(importRequest.getStatus()).thenReturn(WAITING);
+ when(importRequest.getTopicName()).thenReturn("topic1");
+
+ doThrow(new RuntimeException("Consumer failed"))
+ .when(notificationHookConsumer)
+ .startAsyncImportConsumer(NotificationInterface.NotificationType.ASYNC_IMPORT, "import123", "topic1");
+
+ doAnswer(invocation -> {
+ Object newStatus = invocation.getArgument(0);
+ when(importRequest.getStatus()).thenReturn((AtlasAsyncImportRequest.ImportStatus) newStatus);
+ return null;
+ }).when(importRequest).setStatus(any());
+
+ ExecutorService realExecutor = java.util.concurrent.Executors.newSingleThreadExecutor();
+ Field executorField = ImportTaskListenerImpl.class.getDeclaredField("executorService");
+
+ executorField.setAccessible(true);
+ executorField.set(importTaskListener, realExecutor);
+
+ when(requestQueue.poll(anyLong(), any(TimeUnit.class))).thenReturn("import123");
+
+ importTaskListener.onReceiveImportRequest(importRequest);
+
+ Thread.sleep(500);
+
+ verify(notificationHookConsumer, times(1))
+ .closeImportConsumer("import123", "ATLAS_IMPORT_import123");
+
+ realExecutor.shutdownNow();
+ }
+
+ @Test(dataProvider = "importQueueScenarios")
+ public void testGetImportIdFromQueue(String[] pollResults, AtlasAsyncImportRequest[] fetchResults, String expectedImportId, int expectedPollCount) throws InterruptedException {
+ //configure mock queue behaviour
+ if (pollResults.length > 0) {
+ when(requestQueue.poll(anyLong(), any())).thenReturn(pollResults[0], java.util.Arrays.copyOfRange(pollResults, 1, pollResults.length));
+ }
+
+ // Configure fetch service behavior
+ for (AtlasAsyncImportRequest fetchResult : fetchResults) {
+ when(asyncImportService.fetchImportRequestByImportId(fetchResult.getImportId())).thenReturn(fetchResult);
+ }
+
+ // Execute the method
+ AtlasAsyncImportRequest result = importTaskListener.getNextImportFromQueue();
+
+ // Validate results
+ if (expectedImportId == null) {
+ assertNull(result, "Expected result to be null.");
+ } else {
+ assertNotNull(result, "Expected a valid import request.");
+ assertEquals(result.getImportId(), expectedImportId);
+ }
+
+ // Verify that poll was called expected times
+ verify(requestQueue, atLeast(expectedPollCount)).poll(anyLong(), any());
+ }
+
+ @DataProvider(name = "importQueueScenarios")
+ public Object[][] provideImportQueueScenarios() {
+ AtlasAsyncImportRequest validRequest = new AtlasAsyncImportRequest();
+ AtlasAsyncImportRequest invalidRequest = new AtlasAsyncImportRequest();
+
+ validRequest.setImportId(VALID_IMPORT_ID);
+ validRequest.setStatus(WAITING);
+
+ invalidRequest.setImportId(INVALID_IMPORT_ID);
+ invalidRequest.setStatus(ABORTED);
+
+ return new Object[][] {
+ {new String[] {VALID_IMPORT_ID}, new AtlasAsyncImportRequest[] {validRequest}, VALID_IMPORT_ID, 1},
+ {new String[] {null, null, null, null, null}, new AtlasAsyncImportRequest[] {}, null, 5},
+ {new String[] {INVALID_IMPORT_ID, VALID_IMPORT_ID}, new AtlasAsyncImportRequest[] {invalidRequest, validRequest}, VALID_IMPORT_ID, 2},
+ {new String[] {INVALID_IMPORT_ID, INVALID_IMPORT_ID, VALID_IMPORT_ID}, new AtlasAsyncImportRequest[] {invalidRequest, invalidRequest, validRequest}, VALID_IMPORT_ID, 3},
+ {new String[] {null, null, VALID_IMPORT_ID}, new AtlasAsyncImportRequest[] {validRequest}, VALID_IMPORT_ID, 3}
+ };
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_SemaphoreUnavailable() throws AtlasException {
+ Semaphore mockSemaphore = mock(Semaphore.class);
+ ExecutorService mockExecutor = mock(ExecutorService.class);
+ ImportTaskListenerImpl sut = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+
+ setExecutorServiceAndSemaphore(sut, mockExecutor, mockSemaphore);
+
+ when(mockSemaphore.tryAcquire()).thenReturn(false);
+
+ sut.startAsyncImportIfAvailable(VALID_IMPORT_ID);
+
+ verify(mockSemaphore, times(1)).tryAcquire(); // Ensures semaphore was checked
+ verify(asyncImportService, never()).fetchImportRequestByImportId(anyString());
+ verify(mockExecutor, never()).submit(any(Runnable.class));
+ verify(mockSemaphore, never()).release();
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_ValidImportIdProvided() throws AtlasException { // a WAITING request for the given id is submitted to the executor
+ Semaphore asyncImportSemaphore = mock(Semaphore.class);
+ ExecutorService executorService = mock(ExecutorService.class);
+ ImportTaskListenerImpl sut = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+
+ setExecutorServiceAndSemaphore(sut, executorService, asyncImportSemaphore); // inject mocks via reflection
+
+ AtlasAsyncImportRequest validRequest = new AtlasAsyncImportRequest();
+
+ validRequest.setImportId(VALID_IMPORT_ID);
+ validRequest.setStatus(WAITING); // startable status
+
+ when(asyncImportSemaphore.tryAcquire()).thenReturn(true); // permit available
+ when(asyncImportService.fetchImportRequestByImportId(VALID_IMPORT_ID)).thenReturn(validRequest);
+
+ sut.startAsyncImportIfAvailable(VALID_IMPORT_ID);
+
+ verify(asyncImportSemaphore, times(1)).tryAcquire();
+ verify(executorService, times(1)).submit(any(Runnable.class)); // import task handed to the executor
+ verify(asyncImportSemaphore, never()).release(); // Should not release since task is submitted
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_InvalidImportIdProvided() throws AtlasException { // an ABORTED request must not be submitted, and the permit must be returned
+ Semaphore asyncImportSemaphore = mock(Semaphore.class);
+ ExecutorService executorService = mock(ExecutorService.class);
+ ImportTaskListenerImpl sut = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+
+ setExecutorServiceAndSemaphore(sut, executorService, asyncImportSemaphore); // inject mocks via reflection
+
+ AtlasAsyncImportRequest invalidRequest = new AtlasAsyncImportRequest();
+
+ invalidRequest.setImportId(INVALID_IMPORT_ID);
+ invalidRequest.setStatus(ABORTED); // non-startable status
+
+ when(asyncImportSemaphore.tryAcquire()).thenReturn(true); // permit available
+ when(asyncImportService.fetchImportRequestByImportId(INVALID_IMPORT_ID)).thenReturn(invalidRequest);
+
+ sut.startAsyncImportIfAvailable(INVALID_IMPORT_ID);
+
+ verify(asyncImportSemaphore, times(1)).tryAcquire();
+ verify(asyncImportSemaphore, times(1)).release(); // Ensures semaphore is released on failure
+ verify(executorService, never()).submit(any(Runnable.class)); // aborted request is never executed
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_NullImportId_ValidRequestFromQueue() throws AtlasException, InterruptedException { // null importId: the listener falls back to polling the queue
+ Semaphore asyncImportSemaphore = mock(Semaphore.class);
+ ExecutorService executorService = mock(ExecutorService.class);
+ ImportTaskListenerImpl sut = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+
+ setExecutorServiceAndSemaphore(sut, executorService, asyncImportSemaphore); // inject mocks via reflection
+
+ AtlasAsyncImportRequest validRequest = new AtlasAsyncImportRequest();
+
+ validRequest.setImportId(VALID_IMPORT_ID);
+ validRequest.setStatus(WAITING); // startable status
+
+ when(asyncImportSemaphore.tryAcquire()).thenReturn(true); // permit available
+ when(requestQueue.poll(anyLong(), any())).thenReturn(VALID_IMPORT_ID); // queue supplies the id since none was passed in
+ when(asyncImportService.fetchImportRequestByImportId(VALID_IMPORT_ID)).thenReturn(validRequest);
+
+ sut.startAsyncImportIfAvailable(null); // no explicit importId — must come from the queue
+
+ verify(asyncImportSemaphore, times(1)).tryAcquire();
+ verify(executorService, times(1)).submit(any(Runnable.class)); // queued request is executed
+ verify(asyncImportSemaphore, never()).release(); // permit kept while the task runs
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_NullImportId_InvalidRequestFromQueue() throws AtlasException, InterruptedException { // null importId and only an aborted request in the queue: permit must be released
+ Semaphore asyncImportSemaphore = mock(Semaphore.class);
+ ExecutorService executorService = mock(ExecutorService.class);
+ ImportTaskListenerImpl sut = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+
+ setExecutorServiceAndSemaphore(sut, executorService, asyncImportSemaphore); // inject mocks via reflection
+
+ AtlasAsyncImportRequest invalidRequest = new AtlasAsyncImportRequest();
+
+ invalidRequest.setImportId(INVALID_IMPORT_ID);
+ invalidRequest.setStatus(ABORTED); // non-startable status
+
+ when(requestQueue.poll(anyLong(), any())).thenReturn(INVALID_IMPORT_ID).thenReturn(null); // second poll returns null so the loop terminates
+ when(asyncImportService.fetchImportRequestByImportId(INVALID_IMPORT_ID)).thenReturn(invalidRequest);
+
+ when(asyncImportSemaphore.tryAcquire()).thenReturn(true); // permit available
+
+ sut.startAsyncImportIfAvailable(null); // no explicit importId — must come from the queue
+
+ verify(asyncImportSemaphore, times(1)).tryAcquire();
+ verify(executorService, never()).submit(any(Runnable.class)); // aborted request is never executed
+ verify(asyncImportSemaphore, times(1)).release(); // permit returned once nothing startable is found
+ }
+
+ @Test
+ public void testStartAsyncImportIfAvailable_ExceptionDuringExecution() throws AtlasException { // unexpected runtime failure: exception is swallowed and the permit released
+ Semaphore asyncImportSemaphore = mock(Semaphore.class);
+ ExecutorService executorService = mock(ExecutorService.class);
+ ImportTaskListenerImpl sut = new ImportTaskListenerImpl(asyncImportService, notificationHookConsumer, requestQueue);
+
+ setExecutorServiceAndSemaphore(sut, executorService, asyncImportSemaphore); // inject mocks via reflection
+
+ when(asyncImportSemaphore.tryAcquire()).thenReturn(true); // permit available
+ when(asyncImportService.fetchImportRequestByImportId(VALID_IMPORT_ID)).thenThrow(new RuntimeException("Unexpected Error")); // fail inside the fetch
+
+ try {
+ sut.startAsyncImportIfAvailable(VALID_IMPORT_ID);
+ } catch (Exception e) {
+ fail("Exception should not propagate, but it did."); // listener is expected to contain failures internally
+ }
+
+ verify(asyncImportSemaphore, times(1)).release(); // permit must not leak on error
+ }
+
+ private void setExecutorServiceAndSemaphore(ImportTaskListenerImpl importTaskListener, ExecutorService mockExecutor, Semaphore mockSemaphore) { // reflectively swap the listener's private executor/semaphore with mocks
+ try {
+ Field executorField = ImportTaskListenerImpl.class.getDeclaredField("executorService"); // field name coupled to ImportTaskListenerImpl — breaks if renamed
+
+ executorField.setAccessible(true);
+ executorField.set(importTaskListener, mockExecutor);
+
+ Field semaphoreField = ImportTaskListenerImpl.class.getDeclaredField("asyncImportSemaphore"); // same coupling as above
+
+ semaphoreField.setAccessible(true);
+ semaphoreField.set(importTaskListener, mockSemaphore);
+ } catch (Exception e) {
+ fail("Failed to set mocks for testing: " + e.getMessage()); // fail fast so tests don't run against real executor/semaphore
+ }
+ }
+}
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
index 6fc3cc6d13..fa79576472 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
@@ -29,7 +29,10 @@
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.model.notification.ImportNotification;
+import org.apache.atlas.model.notification.MessageSource;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
+import org.apache.atlas.repository.impexp.AsyncImporter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v2.EntityStream;
import org.apache.atlas.type.AtlasType;
@@ -49,9 +52,11 @@
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
+import java.util.Collections;
import java.util.List;
import java.util.Properties;
+import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
@@ -88,6 +93,9 @@ public class NotificationHookConsumerKafkaTest {
@Mock
private AtlasMetricsUtil metricsUtil;
+ @Mock
+ private AsyncImporter asyncImporter;
+
@BeforeTest
public void setup() throws Exception {
MockitoAnnotations.initMocks(this);
@@ -108,11 +116,11 @@ public void shutdown() {
}
@Test
- public void testConsumerConsumesNewMessageWithAutoCommitDisabled() throws AtlasException, InterruptedException, AtlasBaseException {
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user1", createEntity()));
+ public void testConsumerConsumesNewMessageWithAutoCommitDisabled() throws AtlasException, AtlasBaseException {
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user1", createEntity()));
- NotificationConsumer consumer = createNewConsumer(kafkaNotification, false);
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationConsumer consumer = createNewConsumer(NotificationInterface.NotificationType.HOOK, kafkaNotification, false);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
consumeOneMessage(consumer, hookConsumer);
@@ -120,7 +128,7 @@ public void testConsumerConsumesNewMessageWithAutoCommitDisabled() throws AtlasE
verify(atlasEntityStore).createOrUpdate(any(EntityStream.class), anyBoolean());
// produce another message, and make sure it moves ahead. If commit succeeded, this would work.
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user2", createEntity()));
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user2", createEntity()));
consumeOneMessage(consumer, hookConsumer);
verify(atlasEntityStore, times(2)).createOrUpdate(any(EntityStream.class), anyBoolean());
@@ -128,15 +136,15 @@ public void testConsumerConsumesNewMessageWithAutoCommitDisabled() throws AtlasE
}
@Test(enabled = false)
- public void consumerConsumesNewMessageButCommitThrowsAnException_MessageOffsetIsRecorded() throws AtlasException, InterruptedException, AtlasBaseException {
+ public void consumerConsumesNewMessageButCommitThrowsAnException_MessageOffsetIsRecorded() throws AtlasException {
ExceptionThrowingCommitConsumer consumer = createNewConsumerThatThrowsExceptionInCommit(kafkaNotification, true);
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user2", createEntity()));
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user2", createEntity()));
try {
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user1", createEntity()));
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user2", createEntity()));
consumeOneMessage(consumer, hookConsumer);
consumeOneMessage(consumer, hookConsumer);
} catch (KafkaException ex) {
@@ -145,7 +153,7 @@ public void consumerConsumesNewMessageButCommitThrowsAnException_MessageOffsetIs
consumer.disableCommitExpcetion();
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user1", createEntity()));
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user1", createEntity()));
consumeOneMessage(consumer, hookConsumer);
consumeOneMessage(consumer, hookConsumer);
@@ -154,31 +162,61 @@ public void consumerConsumesNewMessageButCommitThrowsAnException_MessageOffsetIs
@Test(dependsOnMethods = "testConsumerConsumesNewMessageWithAutoCommitDisabled")
public void testConsumerRemainsAtSameMessageWithAutoCommitEnabled() throws Exception {
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user3", createEntity()));
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user3", createEntity()));
- NotificationConsumer consumer = createNewConsumer(kafkaNotification, true);
+ NotificationConsumer consumer = createNewConsumer(NotificationInterface.NotificationType.HOOK, kafkaNotification, true);
assertNotNull(consumer);
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
consumeOneMessage(consumer, hookConsumer);
verify(atlasEntityStore).createOrUpdate(any(EntityStream.class), anyBoolean());
// produce another message, but this will not be consumed, as commit code is not executed in hook consumer.
- produceMessage(new HookNotificationV1.EntityCreateRequest("test_user4", createEntity()));
+ produceMessage(null, NotificationInterface.NotificationType.HOOK, new HookNotificationV1.EntityCreateRequest("test_user4", createEntity()));
consumeOneMessage(consumer, hookConsumer);
verify(atlasEntityStore, times(2)).createOrUpdate(any(EntityStream.class), anyBoolean());
}
+ @Test
+ public void testImportMessagesArePublishedToDynamicTopicAndConsumedAndProcessed() throws AtlasException, AtlasBaseException { // end-to-end: publish import notifications to a per-import topic and verify the importer processes them
+ final String importId = "1b198cf8b55fed2e7829efea11f77795";
+ final String topic = "ATLAS_IMPORT_1b198cf8b55fed2e7829efea11f77795"; // presumably ASYNC_IMPORT_TOPIC_PREFIX + importId — confirm against config
+
+ produceMessage(topic, NotificationInterface.NotificationType.ASYNC_IMPORT, new ImportNotification.AtlasEntityImportNotification(importId, "test_user1", new AtlasEntity.AtlasEntityWithExtInfo(createV2Entity()), 1)); // position 1
+
+ // adding dynamic topic created for the notification type and creating consumer for the same
+ addTopicToNotification(NotificationInterface.NotificationType.ASYNC_IMPORT, kafkaNotification, topic);
+
+ NotificationConsumer consumer = createNewConsumer(NotificationInterface.NotificationType.ASYNC_IMPORT, kafkaNotification, false); // auto-commit disabled
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
+
+ consumeOneMessage(consumer, hookConsumer);
+
+ verify(asyncImporter).onImportEntity(any(AtlasEntity.AtlasEntityWithExtInfo.class), anyString(), anyInt()); // first message reached the importer
+
+ // produce another message, and make sure it moves ahead. If commit succeeded, this would work.
+ produceMessage(topic, NotificationInterface.NotificationType.ASYNC_IMPORT, new ImportNotification.AtlasEntityImportNotification(importId, "test_user1", new AtlasEntity.AtlasEntityWithExtInfo(createV2Entity()), 2)); // position 2
+ consumeOneMessage(consumer, hookConsumer);
+
+ verify(asyncImporter, times(2)).onImportEntity(any(AtlasEntity.AtlasEntityWithExtInfo.class), anyString(), anyInt()); // both messages processed, so the offset commit advanced
+ reset(asyncImporter); // clear interactions so dependent tests see a fresh mock
+ }
+
protected String randomString() {
return RandomStringUtils.randomAlphanumeric(10);
}
- AtlasKafkaConsumer createNewConsumer(KafkaNotification kafkaNotification, boolean autoCommitEnabled) {
- return (AtlasKafkaConsumer) kafkaNotification.createConsumers(NotificationInterface.NotificationType.HOOK, 1, autoCommitEnabled).get(0);
+ AtlasKafkaConsumer createNewConsumer(NotificationInterface.NotificationType notificationType, KafkaNotification kafkaNotification, boolean autoCommitEnabled) {
+ return (AtlasKafkaConsumer) kafkaNotification.createConsumers(notificationType, 1, autoCommitEnabled).get(0);
+ }
+
+ void addTopicToNotification(NotificationInterface.NotificationType notificationType, KafkaNotification kafkaNotification, String topic) throws AtlasBaseException { // registers a dynamically-created topic under the given notification type
+ kafkaNotification.addTopicToNotificationType(notificationType, topic);
+ }
ExceptionThrowingCommitConsumer createNewConsumerThatThrowsExceptionInCommit(KafkaNotification kafkaNotification, boolean autoCommitEnabled) {
@@ -187,11 +225,11 @@ ExceptionThrowingCommitConsumer createNewConsumerThatThrowsExceptionInCommit(Kaf
prop.put("enable.auto.commit", autoCommitEnabled);
KafkaConsumer consumer = kafkaNotification.getOrCreateKafkaConsumer(null, prop, NotificationInterface.NotificationType.HOOK, 0);
+
return new ExceptionThrowingCommitConsumer(NotificationInterface.NotificationType.HOOK, consumer, autoCommitEnabled, 1000);
}
- void consumeOneMessage(NotificationConsumer consumer,
- NotificationHookConsumer.HookConsumer hookConsumer) {
+ void consumeOneMessage(NotificationConsumer consumer, NotificationHookConsumer.HookConsumer hookConsumer) {
long startTime = System.currentTimeMillis(); //fetch starting time
while ((System.currentTimeMillis() - startTime) < 10000) {
@@ -201,7 +239,7 @@ void consumeOneMessage(NotificationConsumer consumer,
hookConsumer.handleMessage(msg);
}
- if (messages.size() > 0) {
+ if (!messages.isEmpty()) {
break;
}
}
@@ -278,16 +316,20 @@ void cleanUpNotificationService() {
}
}
- private void produceMessage(HookNotification message) throws NotificationException {
- kafkaNotification.send(NotificationInterface.NotificationType.HOOK, message);
+ private void produceMessage(String topic, NotificationInterface.NotificationType notificationType, HookNotification message) throws NotificationException {
+ if (notificationType == NotificationInterface.NotificationType.HOOK) {
+ kafkaNotification.send(NotificationInterface.NotificationType.HOOK, message);
+ } else {
+ kafkaNotification.send(topic, Collections.singletonList(message), new MessageSource());
+ }
}
private static class ExceptionThrowingCommitConsumer extends AtlasKafkaConsumer {
private boolean exceptionThrowingEnabled;
- public ExceptionThrowingCommitConsumer(NotificationInterface.NotificationType notificationType,
- KafkaConsumer kafkaConsumer, boolean autoCommitEnabled, long pollTimeoutMilliSeconds) {
+ public ExceptionThrowingCommitConsumer(NotificationInterface.NotificationType notificationType, KafkaConsumer kafkaConsumer, boolean autoCommitEnabled, long pollTimeoutMilliSeconds) {
super(notificationType, kafkaConsumer, autoCommitEnabled, pollTimeoutMilliSeconds);
+
exceptionThrowingEnabled = true;
}
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
index 8b3ff75983..e1541fd8d5 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
@@ -17,6 +17,7 @@
*/
package org.apache.atlas.notification;
+import org.apache.atlas.AtlasConfiguration;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.exception.AtlasBaseException;
@@ -29,6 +30,7 @@
import org.apache.atlas.model.notification.HookNotification.HookNotificationType;
import org.apache.atlas.notification.NotificationInterface.NotificationType;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
+import org.apache.atlas.repository.impexp.AsyncImporter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v2.EntityStream;
import org.apache.atlas.type.AtlasType;
@@ -46,23 +48,27 @@
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
+import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
+import static org.apache.atlas.notification.NotificationInterface.NotificationType.ASYNC_IMPORT;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.anyList;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
@@ -91,6 +97,9 @@ public class NotificationHookConsumerTest {
@Mock
private AtlasMetricsUtil metricsUtil;
+ @Mock
+ private AsyncImporter asyncImporter;
+
@BeforeMethod
public void setup() throws AtlasBaseException {
MockitoAnnotations.initMocks(this);
@@ -108,7 +117,7 @@ public void setup() throws AtlasBaseException {
@Test
public void testConsumerCanProceedIfServerIsReady() throws Exception {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
@@ -121,7 +130,7 @@ public void testConsumerCanProceedIfServerIsReady() throws Exception {
@Test
public void testConsumerWaitsNTimesIfServerIsNotReadyNTimes() throws Exception {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
@@ -138,7 +147,7 @@ public void testConsumerWaitsNTimesIfServerIsNotReadyNTimes() throws Exception {
@Test
public void testCommitIsCalledWhenMessageIsProcessed() throws AtlasServiceException, AtlasException {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationConsumer consumer = mock(NotificationConsumer.class);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
EntityCreateRequest message = mock(EntityCreateRequest.class);
@@ -155,7 +164,7 @@ public void testCommitIsCalledWhenMessageIsProcessed() throws AtlasServiceExcept
@Test
public void testCommitIsNotCalledEvenWhenMessageProcessingFails() throws AtlasServiceException, AtlasException, AtlasBaseException {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationConsumer consumer = mock(NotificationConsumer.class);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
EntityCreateRequest message = new EntityCreateRequest("user", Collections.singletonList(mock(Referenceable.class)));
@@ -169,7 +178,7 @@ public void testCommitIsNotCalledEvenWhenMessageProcessingFails() throws AtlasSe
@Test
public void testConsumerProceedsWithFalseIfInterrupted() throws Exception {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
@@ -189,11 +198,11 @@ public void testConsumersStartedIfHAIsDisabled() throws Exception {
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
notificationHookConsumer.startInternal(configuration, executorService);
verify(notificationInterface).createConsumers(NotificationType.HOOK, 1);
- verify(executorService).submit(any(NotificationHookConsumer.HookConsumer.class));
+ verify(executorService, times(1)).submit(any(NotificationHookConsumer.HookConsumer.class));
}
@Test
@@ -207,7 +216,7 @@ public void testConsumersAreNotStartedIfHAIsEnabled() throws Exception {
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
notificationHookConsumer.startInternal(configuration, executorService);
@@ -226,7 +235,7 @@ public void testConsumersAreStartedWhenInstanceBecomesActive() throws Exception
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
notificationHookConsumer.startInternal(configuration, executorService);
notificationHookConsumer.instanceIsActive();
@@ -246,7 +255,7 @@ public void testConsumersAreStoppedWhenInstanceBecomesPassive() throws Exception
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- final NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ final NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
doAnswer(new Answer() {
@Override
@@ -277,7 +286,7 @@ public void consumersStoppedBeforeStarting() throws Exception {
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- final NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+ final NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
notificationHookConsumer.startInternal(configuration, executorService);
notificationHookConsumer.instanceIsPassive();
@@ -331,8 +340,159 @@ public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
assertFalse(notificationHookConsumer.consumers.get(0).isAlive());
}
+ @Test
+ public void onCloseImportConsumerShutdownConsumerAndDeletesTopic() throws Exception {
+ String importId = "1b198cf8b55fed2e7829efea11f77795";
+ String topic = AtlasConfiguration.ASYNC_IMPORT_TOPIC_PREFIX.getString() + importId;
+ List<NotificationConsumer<HookNotification>> consumers = new ArrayList<>();
+
+ NotificationConsumer notificationHookImportConsumerMock = mock(NotificationConsumer.class);
+
+ when(notificationHookImportConsumerMock.subscription()).thenReturn(Collections.emptySet());
+ when(notificationHookImportConsumerMock.getTopicPartition()).thenReturn(Collections.emptySet());
+ doNothing().when(notificationHookImportConsumerMock).close();
+
+ consumers.add(notificationHookImportConsumerMock);
+
+ doNothing().when(notificationInterface).addTopicToNotificationType(ASYNC_IMPORT, topic);
+ when(notificationInterface.createConsumers(ASYNC_IMPORT, 1)).thenReturn(consumers);
+ doNothing().when(notificationInterface).deleteTopic(ASYNC_IMPORT, AtlasConfiguration.ASYNC_IMPORT_TOPIC_PREFIX.getString() + importId);
+
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
+
+ // setting this just so this test would not create hook consumers
+ Field consumerDisabledField = NotificationHookConsumer.class.getDeclaredField("consumerDisabled");
+ consumerDisabledField.setAccessible(true);
+ consumerDisabledField.set(notificationHookConsumer, true);
+
+ // initializing the executors
+ notificationHookConsumer.startInternal(configuration, null);
+
+ notificationHookConsumer.startAsyncImportConsumer(ASYNC_IMPORT, importId, "ATLAS_IMPORT_" + importId);
+
+ // consumer created
+ assertTrue(notificationHookConsumer.consumers.stream().anyMatch(consumer -> consumer.getName().contains(importId)));
+
+ notificationHookConsumer.closeImportConsumer(importId, "ATLAS_IMPORT_" + importId);
+
+ // consumer deleted / shutdown and topic deleted
+ assertTrue(notificationHookConsumer.consumers.stream().noneMatch(consumer -> consumer.getName().contains(importId)));
+ verify(notificationInterface).deleteTopic(ASYNC_IMPORT, AtlasConfiguration.ASYNC_IMPORT_TOPIC_PREFIX.getString() + importId);
+ }
+
+ @Test
+ public void testExecutorCreatedOnlyOnceAcrossStartAndHAActive() throws Exception {
+ // Setup
+ when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false);
+ when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
+ when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
+
+ List<NotificationConsumer<HookNotification>> consumers = new ArrayList<>();
+ consumers.add(mock(NotificationConsumer.class));
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
+
+ TestableNotificationHookConsumer hookConsumer = new TestableNotificationHookConsumer();
+
+ // Call startInternal() twice
+ hookConsumer.startInternal(configuration, null);
+ hookConsumer.startInternal(configuration, null);
+
+ // Simulate HA active instance, which may call executor creation
+ hookConsumer.instanceIsActive();
+
+ // Validate executor was created only once
+ assertEquals(hookConsumer.getExecutorCreationCount(), 1, "Executor should be created only once and reused");
+ }
+
+ @Test
+ public void testMultipleInstanceIsActiveCallsOnlyCreateExecutorOnce() throws Exception {
+ TestableNotificationHookConsumer notificationHookConsumer = new TestableNotificationHookConsumer();
+
+ when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
+ when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(true);
+ when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1))
+ .thenReturn(Collections.singletonList(mock(NotificationConsumer.class)));
+
+ notificationHookConsumer.instanceIsActive();
+ notificationHookConsumer.instanceIsActive(); // should not recreate
+
+ assertEquals(notificationHookConsumer.getExecutorCreationCount(), 1,
+ "Executor should be created only once even if instanceIsActive is called multiple times");
+ }
+
+ @Test
+ public void testStartInternalThenInstanceIsActiveDoesNotCreateExecutorAgain() throws Exception {
+ TestableNotificationHookConsumer notificationHookConsumer =
+ new TestableNotificationHookConsumer();
+
+ when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
+ when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false);
+ when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1))
+ .thenReturn(Collections.singletonList(mock(NotificationConsumer.class)));
+
+ notificationHookConsumer.startInternal(configuration, null);
+ notificationHookConsumer.instanceIsActive(); // executor already exists
+
+ assertEquals(notificationHookConsumer.getExecutorCreationCount(), 1,
+ "Executor should not be created again in instanceIsActive if already created in startInternal");
+ }
+
+ @Test
+ public void testImportConsumerUsesExistingExecutor() throws Exception {
+ TestableNotificationHookConsumer notificationHookConsumer =
+ new TestableNotificationHookConsumer();
+
+ String importId = "test-import-id";
+ String topic = "ATLAS_IMPORT_" + importId;
+
+ when(notificationInterface.createConsumers(NotificationType.ASYNC_IMPORT, 1))
+ .thenReturn(Collections.singletonList(mock(NotificationConsumer.class)));
+
+ // Manually trigger executor creation
+ notificationHookConsumer.startInternal(configuration, null);
+
+ // Call import consumer – should use the same executor
+ notificationHookConsumer.startAsyncImportConsumer(NotificationType.ASYNC_IMPORT, importId, topic);
+
+ assertEquals(notificationHookConsumer.getExecutorCreationCount(), 1,
+ "startImportNotificationConsumer should reuse existing executor and not create a new one");
+ }
+
+ @Test
+ public void testHookConsumersNotStartedWhenConsumersAreDisabled() throws Exception {
+ // Arrange
+ when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false);
+ when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
+
+ // TestableNotificationHookConsumer with override that sets consumerDisabled = true
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter) {
+ @Override
+ protected ExecutorService createExecutor() {
+ return mock(ExecutorService.class);
+ }
+
+ @Override
+ void startHookConsumers() {
+ throw new RuntimeException("startHookConsumers should not be called when consumers are disabled");
+ }
+ };
+
+ // Use reflection to manually set the consumerDisabled field to true
+ Field consumerDisabledField = NotificationHookConsumer.class.getDeclaredField("consumerDisabled");
+ consumerDisabledField.setAccessible(true);
+ consumerDisabledField.set(notificationHookConsumer, true);
+
+ // Act
+ notificationHookConsumer.startInternal(configuration, null);
+
+ // Assert
+ // No exception = test passed; if startHookConsumers() is invoked, it will throw
+ }
+
private NotificationHookConsumer setupNotificationHookConsumer() throws AtlasException {
- List> consumers = new ArrayList();
+ List> consumers = new ArrayList<>();
NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
consumers.add(notificationConsumerMock);
@@ -342,6 +502,25 @@ private NotificationHookConsumer setupNotificationHookConsumer() throws AtlasExc
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
when(notificationConsumerMock.receive()).thenThrow(new IllegalStateException());
when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- return new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null);
+
+ return new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
+ }
+
+ class TestableNotificationHookConsumer extends NotificationHookConsumer {
+ int executorCreationCount;
+
+ TestableNotificationHookConsumer() throws AtlasException {
+ super(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry, metricsUtil, null, asyncImporter);
+ }
+
+ @Override
+ protected ExecutorService createExecutor() {
+ executorCreationCount++;
+ return mock(ExecutorService.class);
+ }
+
+ public int getExecutorCreationCount() {
+ return executorCreationCount;
+ }
}
}
diff --git a/webapp/src/test/java/org/apache/atlas/web/filters/HeaderUtilsTest.java b/webapp/src/test/java/org/apache/atlas/web/filters/HeaderUtilsTest.java
index a6113a52f6..d3927f698e 100644
--- a/webapp/src/test/java/org/apache/atlas/web/filters/HeaderUtilsTest.java
+++ b/webapp/src/test/java/org/apache/atlas/web/filters/HeaderUtilsTest.java
@@ -100,7 +100,7 @@ private Properties createPropertiesWithHeaders(String... headers) {
Properties props = new Properties();
for (int i = 0; i < headers.length / 2; i++) {
- props.setProperty(headers[i * 2], headers[(i *2) + 1]);
+ props.setProperty(headers[i * 2], headers[(i * 2) + 1]);
}
return props;