From c6770290c9ff7ac7b77127816f9e1ec05061b130 Mon Sep 17 00:00:00 2001 From: tongwu-msft Date: Fri, 20 Aug 2021 10:05:00 +0800 Subject: [PATCH] Add initial import operation (#1992) Add an import operation that lets customers perform an initial load of data into the FHIR service. Sample request: ``` curl --location --request POST 'https://localhost:44348/$import' \ --header 'Accept: application/fhir+json' \ --header 'Prefer: respond-async' \ --header 'Content-Type: application/fhir+json' \ --data-raw '{ "resourceType": "Parameters", "parameter": [ { "name": "inputFormat", "valueString": "application/fhir+ndjson" }, { "name": "mode", "valueString": "InitialLoad" }, { "name": "input", "part": [ { "name": "type", "valueString": "Patient" }, { "name": "url", "valueUri": "http://127.0.0.1:10000/devstoreaccount1/0000/Patient.ndjson" } ] }, { "name": "input", "part": [ { "name": "type", "valueString": "Patient" }, { "name": "url", "valueUri": "http://127.0.0.1:10000/devstoreaccount1/0000/PatientErr.ndjson" } ] }, { "name": "storageDetail", "part": [ { "name": "type", "valueString": "azure-blob" } ] } ] }' ``` Sample result: ``` { "transactionTime": "2021-05-17T06:23:47.0892657+00:00", "request": "https://localhost:44348/$import", "output": [ { "type": "Patient", "count": 4, "inputUrl": "http://127.0.0.1:10000/devstoreaccount1/0000/Patient.ndjson" }, { "type": "Patient", "count": 4, "inputUrl": "http://127.0.0.1:10000/devstoreaccount1/0000/PatientErr.ndjson" } ], "error": [ { "type": "Patient", "count": 1, "inputUrl": "http://127.0.0.1:10000/devstoreaccount1/0000/PatientErr.ndjson", "url": "http://127.0.0.1:10000/devstoreaccount1/fhirlogs/Patient38d5d864efcc40ecbdf31ee63dea7f89.ndjson" } ] } ``` --- build/jobs/e2e-tests.yml | 13 + build/jobs/provision-deploy.yml | 2 + .../templates/default-azuredeploy-docker.json | 25 +- samples/templates/default-azuredeploy.json | 25 +- .../Features/ActionResults/ImportResult.cs | 47 + .../BackgroundTaskService/TaskFactory.cs | 109 + 
.../TaskHostingBackgroundService.cs | 73 + .../ValidateImportRequestFilterAttribute.cs | 51 + .../Import/InitialImportLockMiddleware.cs | 79 + .../InitialImportLockMiddlewareExtensions.cs | 20 + .../Features/Routing/KnownRoutes.cs | 4 + .../Features/Routing/RouteNames.cs | 4 + .../Features/Routing/UrlResolver.cs | 3 + .../Resources.Designer.cs | 45 + src/Microsoft.Health.Fhir.Api/Resources.resx | 16 + ...ntainerRegistryAccessTokenProviderTests.cs | 2 +- .../AzureAccessTokenClientInitializerTests.cs | 1 + .../CloudBlockBlobWrapperTests.cs | 2 - ...zureBlobIntegrationDataStoreClientTests.cs | 337 ++ ...ureContainerRegistryAccessTokenProvider.cs | 2 +- .../AzureAccessTokenClientInitializer.cs | 1 + .../AzureAccessTokenProvider.cs | 2 +- ...erverBuilderAzureRegistrationExtensions.cs | 38 + .../AzureAccessTokenClientInitializerV2.cs | 75 + .../AzureBlobIntegrationDataStoreClient.cs | 251 ++ .../AzureBlobSourceStream.cs | 212 + ...zureConnectionStringClientInitializerV2.cs | 66 + ...grationStoreRetryExceptionPolicyFactory.cs | 71 + .../Import/CancelImportRequestHandlerTests.cs | 116 + .../Import/GetImportRequestHandlerTests.cs | 126 + .../Import/ImportOrchestratorTaskTests.cs | 773 ++++ .../Import/ImportProcessingTaskTests.cs | 375 ++ .../Import/ImportResourceLoaderTests.cs | 273 ++ .../Operations/Import/TestTaskManager.cs | 48 + ...icrosoft.Health.Fhir.Core.UnitTests.csproj | 1 + .../Configs/ImportTaskConfiguration.cs | 102 + .../IntegrationDataStoreConfiguration.cs | 34 + .../Configs/OperationsConfiguration.cs | 4 + .../Configs/TaskHostingConfiguration.cs | 30 +- .../Extensions/ImportMediatorExtensions.cs | 58 + .../AccessTokenProviderException.cs | 2 +- .../IAccessTokenProvider.cs | 2 +- .../ICompressedRawResourceConverter.cs | 29 + .../Operations/IIntegrationDataStoreClient.cs | 83 + .../IIntegrationDataStoreClientInitilizer.cs | 29 + .../Import/CancelImportRequestHandler.cs | 65 + .../Import/CreateImportRequestHandler.cs | 106 + .../Import/GetImportRequestHandler.cs 
| 84 + .../Import/IImportErrorSerializer.cs | 31 + .../Operations/Import/IImportErrorStore.cs | 28 + .../Import/IImportErrorStoreFactory.cs | 23 + ...mportOrchestratorTaskDataStoreOperation.cs | 25 + .../Import/IImportResourceLoader.cs | 28 + .../Import/IImportResourceParser.cs | 22 + .../Import/IResourceBulkImporter.cs | 33 + .../Import/IResourceMetaPopulator.cs | 22 + .../Operations/Import/ISequenceIdGenerator.cs | 20 + .../Operations/Import/ImportConstants.cs | 12 + .../Features/Operations/Import/ImportError.cs | 34 + .../Operations/Import/ImportErrorStore.cs | 62 + .../Import/ImportErrorStoreFactory.cs | 34 + .../Import/ImportFailedOperationOutcome.cs | 39 + .../Import/ImportFileEtagNotMatchException.cs | 24 + .../Import/ImportOperationOutcome.cs | 31 + .../Import/ImportOrchestratorTask.cs | 518 +++ .../Import/ImportOrchestratorTaskContext.cs | 31 + .../Import/ImportOrchestratorTaskInputData.cs | 72 + .../Import/ImportOrchestratorTaskProgress.cs | 20 + .../Import/ImportProcessingException.cs | 24 + .../Import/ImportProcessingProgress.cs | 30 + .../Operations/Import/ImportProcessingTask.cs | 268 ++ .../Import/ImportProcessingTaskInputData.cs | 49 + .../Import/ImportProcessingTaskResult.cs | 35 + .../Operations/Import/ImportResource.cs | 57 + .../Operations/Import/ImportResourceLoader.cs | 180 + .../Import/ImportTaskErrorResult.cs | 27 + .../Operations/Import/ImportTaskResult.cs | 38 + .../Operations/Import/Models/ImportRequest.cs | 44 + .../Models/ImportRequestStorageDetail.cs | 20 + .../Operations/Import/Models/InputResource.cs | 28 + .../Operations/Import/ProgressRecord.cs | 32 + .../IntegrationDataStoreClientConstants.cs | 13 + ...tionDataStoreClientInitializerException.cs | 24 + .../IntegrationDataStoreException.cs | 24 + .../Operations/OperationsConstants.cs | 4 + .../Features/Security/DataActions.cs | 3 +- .../Features/Security/roles.schema.json | 1 + .../BulkImport/CancelImportRequest.cs | 25 + .../BulkImport/CancelImportResponse.cs | 22 + 
.../BulkImport/CreateImportRequest.cs | 57 + .../BulkImport/CreateImportResponse.cs | 24 + .../Messages/BulkImport/GetImportRequest.cs | 25 + .../Messages/BulkImport/GetImportResponse.cs | 34 + .../Resources.Designer.cs | 27 + src/Microsoft.Health.Fhir.Core/Resources.resx | 10 + .../Microsoft.Health.Fhir.R4.Client.csproj | 3 + .../Properties/launchSettings.json | 4 + .../Microsoft.Health.Fhir.R5.Client.csproj | 3 + .../Microsoft.Health.Fhir.R5.Web.csproj | 1 + .../Properties/launchSettings.json | 4 + .../Controllers/ImportControllerTests.cs | 204 + .../Features/Filters/FilterTestsHelper.cs | 8 + ...teBulkImportRequestFilterAttributeTests.cs | 174 + .../Headers/ImportResultExtensionsTests.cs | 43 + .../Import/ImportRequestExtensionsTests.cs | 48 + .../InitialImportLockMiddlewareTests.cs | 100 + .../Features/Routing/UrlResolverTests.cs | 2 +- ...Health.Fhir.Shared.Api.UnitTests.projitems | 5 + .../Controllers/ImportController.cs | 203 + .../Import/ImportRequestExtensions.cs | 160 + .../Operations/ParametersExtensions.cs | 61 + ...Microsoft.Health.Fhir.Shared.Api.projitems | 3 + .../Modules/FhirModule.cs | 1 + .../Modules/OperationsModule.cs | 22 + .../FhirServerServiceCollectionExtensions.cs | 5 + .../FhirClient.cs | 37 + .../Import/ImportErrorSerializerTests.cs | 34 + ...ealth.Fhir.Shared.Core.UnitTests.projitems | 1 + .../Import/ImportErrorSerializer.cs | 47 + .../Operations/Import/ImportResourceParser.cs | 86 + ...icrosoft.Health.Fhir.Shared.Core.projitems | 2 + .../Samples.cs | 5 + .../Startup.cs | 26 + .../appsettings.json | 11 + .../roles.json | 11 + .../SqlServerBulkImportOperationTests.cs | 68 + .../Features/Import/UpsertSPRelationTests.cs | 62 + .../CompressedRawResourceConverterTests.cs | 8 +- ...entAssignmentTableBulkCopyDataGenerator.cs | 71 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 68 + ...rSearchParamsTableBulkCopyDataGenerator.cs | 68 + ...ySearchParamsTableBulkCopyDataGenerator.cs | 72 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 70 + 
...eSearchParamsTableBulkCopyDataGenerator.cs | 74 + .../ResourceTableBulkCopyDataGenerator.cs | 64 + ...rceWriteClaimTableBulkCopyDataGenerator.cs | 69 + ...archParamtersTableBulkCopyDataGenerator.cs | 40 + ...gSearchParamsTableBulkCopyDataGenerator.cs | 66 + .../TableBulkCopyDataGenerator.cs | 32 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 73 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 80 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 76 + ...nSearchParamsTableBulkCopyDataGenerator.cs | 66 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 70 + ...tSearchParamsTableBulkCopyDataGenerator.cs | 64 + ...eSearchParamsTableBulkCopyDataGenerator.cs | 70 + ...iSearchParamsTableBulkCopyDataGenerator.cs | 64 + .../Import/ISqlBulkCopyDataWrapperFactory.cs | 25 + .../Operations/Import/ISqlImportOperation.cs | 38 + .../Import/ImportResourceSqlExtentions.cs | 23 + .../Import/SqlBulkCopyDataWrapper.cs | 64 + .../Import/SqlBulkCopyDataWrapperFactory.cs | 54 + .../Operations/Import/SqlDbTypeExtensions.cs | 50 + .../Operations/Import/SqlImportOperation.cs | 388 ++ .../Import/SqlResourceBulkImporter.cs | 401 ++ .../Import/SqlResourceMetaPopulator.cs | 24 + .../Import/SqlStoreSequenceIdGenerator.cs | 23 + .../Features/Schema/Migrations/16.diff.sql | 2 +- .../Features/Schema/Migrations/16.sql | 2 +- .../Features/Schema/Migrations/17.diff.sql | 447 ++ .../Features/Schema/Migrations/17.sql | 3853 +++++++++++++++++ .../Features/Schema/SchemaVersion.cs | 1 + .../Features/Schema/SchemaVersionConstants.cs | 2 +- .../Features/Search/SqlServerSearchService.cs | 6 +- .../Storage/CompressedRawResourceConverter.cs | 11 +- .../Storage/SqlServerFhirDataStore.cs | 9 +- .../Features/Storage/SqlServerTaskConsumer.cs | 5 +- .../Storage/SqlServerTaskContextUpdater.cs | 67 + .../SqlServerTaskContextUpdaterFactory.cs | 37 + .../Features/Storage/SqlServerTaskManager.cs | 82 +- ...lkDateTimeSearchParameterV1RowGenerator.cs | 3 + .../BulkSearchParameterRowGenerator.cs | 2 +- 
.../BulkTokenSearchParameterV1RowGenerator.cs | 3 + .../Microsoft.Health.Fhir.SqlServer.csproj | 4 +- ...rBuilderSqlServerRegistrationExtensions.cs | 111 + .../Resources.Designer.cs | 9 + .../Resources.resx | 4 + .../Microsoft.Health.Fhir.Stu3.Client.csproj | 3 + .../Properties/launchSettings.json | 4 + .../Categories.cs | 2 + .../Microsoft.Health.Fhir.Tests.Common.csproj | 9 + .../Import-DupPatientTemplate.ndjson | 2 + .../Normative/Import-InvalidPatient.ndjson | 5 + .../TestFiles/Normative/Import-Patient.ndjson | 4 + .../Import-SinglePatientTemplate.ndjson | 1 + .../AuditEventSubType.cs | 2 + .../TaskHostingTests.cs | 48 +- .../TestTask.cs | 8 - .../IContextUpdater.cs | 6 +- .../IContextUpdaterFactory.cs | 20 + src/Microsoft.Health.TaskManagement/ITask.cs | 6 - .../ITaskManager.cs | 3 +- .../TaskHosting.cs | 15 +- .../TaskResultData.cs | 4 + .../Microsoft.Health.Fhir.R4.Tests.E2E.csproj | 1 + .../TestUsers.cs | 8 + ...oft.Health.Fhir.Shared.Tests.E2E.projitems | 22 + ...rosoft.Health.Fhir.Shared.Tests.E2E.shproj | 1 + .../Rest/BatchTests.cs | 1 - .../Rest/ExportDataValidationTests.cs | 1 - .../Import/ImportBasicSearchTestFixture.cs | 60 + .../Rest/Import/ImportBasicSearchTests.cs | 56 + .../ImportCompositeSearchTestFixture.cs | 99 + .../Rest/Import/ImportCompositeSearchTests.cs | 145 + .../Import/ImportDateSearchTestFixture.cs | 46 + .../Rest/Import/ImportDateSearchTests.cs | 162 + .../Import/ImportNumberSearchTestFixture.cs | 48 + .../Rest/Import/ImportNumberSearchTests.cs | 68 + .../Import/ImportQuantitySearchTestFixture.cs | 49 + .../Rest/Import/ImportQuantitySearchTests.cs | 173 + .../ImportReferenceSearchTestFixture.cs | 37 + .../Rest/Import/ImportReferenceSearchTests.cs | 52 + .../Import/ImportStringSearchTestFixture.cs | 57 + .../Rest/Import/ImportStringSearchTests.cs | 138 + .../Rest/Import/ImportTestFixture.cs | 48 + .../Rest/Import/ImportTestHelper.cs | 177 + .../Rest/Import/ImportTests.cs | 487 +++ .../Import/ImportTokenSearchTestFixture.cs | 76 + 
.../Rest/Import/ImportTokenSearchTests.cs | 133 + .../Rest/Import/ImportUriSearchTestFixture.cs | 45 + .../Rest/Import/ImportUriSearchTests.cs | 52 + .../Import/StartupForImportTestProvider.cs | 26 + .../Rest/InProcTestFhirServer.cs | 6 + .../Operations/Import/DataGeneratorsTests.cs | 150 + .../Operations/Import/SqlBulkImporterTests.cs | 409 ++ .../SqlServerFhirDataBulkOperationTests.cs | 430 ++ .../Operations/Import/TestBulkDataProvider.cs | 267 ++ ...lServerTransientFaultRetryPolicyFactory.cs | 18 + ...th.Fhir.Shared.Tests.Integration.projitems | 5 + .../Persistence/FhirStorageTests.cs | 2 - .../SqlServerFhirStorageTestsFixture.cs | 5 + .../SqlServerSchemaUpgradeTests.cs | 2 + .../Persistence/SqlServerTaskConsumerTests.cs | 81 +- .../Persistence/SqlServerTaskManagerTests.cs | 125 +- testauthenvironment.json | 6 + 235 files changed, 18387 insertions(+), 138 deletions(-) create mode 100644 src/Microsoft.Health.Fhir.Api/Features/ActionResults/ImportResult.cs create mode 100644 src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskFactory.cs create mode 100644 src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskHostingBackgroundService.cs create mode 100644 src/Microsoft.Health.Fhir.Api/Features/Filters/ValidateImportRequestFilterAttribute.cs create mode 100644 src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddleware.cs create mode 100644 src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddlewareExtensions.cs create mode 100644 src/Microsoft.Health.Fhir.Azure.UnitTests/IntegrationDataStore/AzureBlobIntegrationDataStoreClientTests.cs create mode 100644 src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureAccessTokenClientInitializerV2.cs create mode 100644 src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobIntegrationDataStoreClient.cs create mode 100644 src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobSourceStream.cs create mode 100644 
src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureConnectionStringClientInitializerV2.cs create mode 100644 src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/IntegrationStoreRetryExceptionPolicyFactory.cs create mode 100644 src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/CancelImportRequestHandlerTests.cs create mode 100644 src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs create mode 100644 src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/ImportOrchestratorTaskTests.cs create mode 100644 src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/ImportProcessingTaskTests.cs create mode 100644 src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/ImportResourceLoaderTests.cs create mode 100644 src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/TestTaskManager.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Configs/ImportTaskConfiguration.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Configs/IntegrationDataStoreConfiguration.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Extensions/ImportMediatorExtensions.cs rename src/Microsoft.Health.Fhir.Core/Features/Operations/{Export/ExportDestinationClient => }/AccessTokenProviderException.cs (88%) rename src/Microsoft.Health.Fhir.Core/Features/Operations/{Export/ExportDestinationClient => }/IAccessTokenProvider.cs (92%) create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/ICompressedRawResourceConverter.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClient.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClientInitilizer.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CancelImportRequestHandler.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CreateImportRequestHandler.cs create mode 100644 
src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorSerializer.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStore.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStoreFactory.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportOrchestratorTaskDataStoreOperation.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceLoader.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceParser.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceBulkImporter.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceMetaPopulator.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ISequenceIdGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportConstants.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportError.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStoreFactory.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFailedOperationOutcome.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOperationOutcome.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTask.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskContext.cs create mode 100644 
src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskInputData.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskProgress.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingProgress.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTask.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskInputData.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskResult.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResource.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResourceLoader.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskErrorResult.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskResult.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequest.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequestStorageDetail.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/InputResource.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ProgressRecord.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientConstants.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientInitializerException.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreException.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportRequest.cs create mode 100644 
src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportResponse.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportRequest.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportResponse.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportRequest.cs create mode 100644 src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportResponse.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/ValidateBulkImportRequestFilterAttributeTests.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Headers/ImportResultExtensionsTests.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/ImportRequestExtensionsTests.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/InitialImportLockMiddlewareTests.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/Import/ImportRequestExtensions.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/ParametersExtensions.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Operations/Import/ImportErrorSerializerTests.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportErrorSerializer.cs create mode 100644 src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportResourceParser.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/SqlServerBulkImportOperationTests.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/UpsertSPRelationTests.cs create mode 100644 
src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/CompartmentAssignmentTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/DateTimeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/NumberSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/QuantitySearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ResourceTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ResourceWriteClaimTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/SearchParamtersTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/StringSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 
src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenStringCompositeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTextSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/UriSearchParamsTableBulkCopyDataGenerator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlBulkCopyDataWrapperFactory.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlImportOperation.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportResourceSqlExtentions.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapper.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapperFactory.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlDbTypeExtensions.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImportOperation.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceBulkImporter.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceMetaPopulator.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlStoreSequenceIdGenerator.cs create mode 
100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.diff.sql create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.sql create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdater.cs create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdaterFactory.cs create mode 100644 src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-DupPatientTemplate.ndjson create mode 100644 src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-InvalidPatient.ndjson create mode 100644 src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-Patient.ndjson create mode 100644 src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-SinglePatientTemplate.ndjson create mode 100644 src/Microsoft.Health.TaskManagement/IContextUpdaterFactory.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTests.cs create mode 100644 
test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportReferenceSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportReferenceSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportStringSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportStringSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestHelper.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTestFixture.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/StartupForImportTestProvider.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/DataGeneratorsTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlBulkImporterTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlServerFhirDataBulkOperationTests.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestBulkDataProvider.cs create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestSqlServerTransientFaultRetryPolicyFactory.cs diff --git a/build/jobs/e2e-tests.yml b/build/jobs/e2e-tests.yml index f285c1f782..00a31f0d1a 100644 --- a/build/jobs/e2e-tests.yml +++ 
b/build/jobs/e2e-tests.yml @@ -60,6 +60,15 @@ steps: Write-Host "##vso[task.setvariable variable=TestExportStoreUri]$($exportStoreUri)" Write-Host "##vso[task.setvariable variable=TestExportStoreKey]$($exportStoreKey.Value)" + $integrationStoreSettings = $appSettings | where {$_.Name -eq "FhirServer__Operations__IntegrationDataStore__StorageAccountUri"} + $integrationStoreUri = $integrationStoreSettings[0].Value + Write-Host "$integrationStoreUri" + $integrationStoreAccountName = [System.Uri]::new("$integrationStoreUri").Host.Split('.')[0] + $integrationStoreKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name "$integrationStoreAccountName" | Where-Object {$_.KeyName -eq "key1"} + + Write-Host "##vso[task.setvariable variable=TestIntegrationStoreUri]$($integrationStoreUri)" + Write-Host "##vso[task.setvariable variable=TestIntegrationStoreKey]$($integrationStoreKey.Value)" + Write-Host "##vso[task.setvariable variable=Resource]$(TestEnvironmentUrl)" $secrets = Get-AzKeyVaultSecret -VaultName resolute-oss-tenant-info @@ -99,6 +108,8 @@ steps: 'TestContainerRegistryPassword': $(TestContainerRegistryPassword) 'TestExportStoreUri': $(TestExportStoreUri) 'TestExportStoreKey': $(TestExportStoreKey) + 'TestIntegrationStoreUri': $(TestIntegrationStoreUri) + 'TestIntegrationStoreKey': $(TestIntegrationStoreKey) 'tenant-admin-service-principal-name': $(tenant-admin-service-principal-name) 'tenant-admin-service-principal-password': $(tenant-admin-service-principal-password) 'tenant-admin-user-name': $(tenant-admin-user-name) @@ -116,6 +127,8 @@ steps: 'user_globalConverterUser_secret': $(user_globalConverterUser_secret) 'user_globalExporterUser_id': $(user_globalExporterUser_id) 'user_globalExporterUser_secret': $(user_globalExporterUser_secret) + 'user_globalImporterUser_id': $(user_globalImporterUser_id) + 'user_globalImporterUser_secret': $(user_globalImporterUser_secret) 'user_globalReaderUser_id': $(user_globalReaderUser_id) 
'user_globalReaderUser_secret': $(user_globalReaderUser_secret) 'user_globalWriterUser_id': $(user_globalWriterUser_id) diff --git a/build/jobs/provision-deploy.yml b/build/jobs/provision-deploy.yml index 22797244d2..99d4c99b8d 100644 --- a/build/jobs/provision-deploy.yml +++ b/build/jobs/provision-deploy.yml @@ -71,6 +71,8 @@ jobs: enableAadSmartOnFhirProxy = $true enableExport = $true enableConvertData = $true + enableImport = $true + backgroundTaskCount = 5 enableReindex = if ("${{ parameters.reindexEnabled }}" -eq "true") { $true } else { $false } imageTag = '${{ parameters.imageTag }}' } diff --git a/samples/templates/default-azuredeploy-docker.json b/samples/templates/default-azuredeploy-docker.json index 80c458ea91..15d546c204 100644 --- a/samples/templates/default-azuredeploy-docker.json +++ b/samples/templates/default-azuredeploy-docker.json @@ -198,6 +198,20 @@ "metadata": { "description": "Determines whether the $reindex operation will be enabled for this fhir instance." } + }, + "enableImport": { + "type": "bool", + "defaultValue": false, + "metadata": { + "description": "Determines whether the $import operation will be enabled for this fhir instance." 
+ } + }, + "backgroundTaskCount": { + "type": "int", + "defaultValue": 1, + "metadata": { + "description": "Supports parallel background task running" + } } }, "variables": { @@ -214,6 +228,7 @@ "storageBlobDataContributerRoleId": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Authorization/roleDefinitions/', 'ba92f5b4-2d11-453d-a403-e96b0029c9fe')]", "acrPullRoleId": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Authorization/roleDefinitions/', '7f951dda-4ed3-4680-a7ca-43fe172d538d')]", "blobStorageUri": "[if(variables('isMAG'), '.blob.core.usgovcloudapi.net', '.blob.core.windows.net')]", + "enableIntegrationStore": "[or(parameters('enableExport'), parameters('enableImport'))]", "staticFhirServerConfigProperties": { "APPINSIGHTS_PORTALINFO": "ASP.NETCORE", "APPINSIGHTS_PROFILERFEATURE_VERSION": "1.0.0", @@ -228,11 +243,15 @@ "SqlServer__Initialize": "[equals(parameters('solutionType'),'FhirServerSqlServer')]", "SqlServer__SchemaOptions__AutomaticUpdatesEnabled": "[if(equals(parameters('sqlSchemaAutomaticUpdatesEnabled'),'auto'), true(), false())]", "DataStore": "[if(equals(parameters('solutionType'),'FhirServerCosmosDB'), 'CosmosDb', 'SqlServer')]", + "TaskHosting__Enabled": "[if(equals(parameters('solutionType'),'FhirServerCosmosDB'), false(), parameters('enableImport'))]", + "TaskHosting__MaxRunningTaskCount": "[parameters('backgroundTaskCount')]", + "FhirServer__Operations__IntegrationDataStore__StorageAccountUri": "[if(parameters('enableImport'), concat('https://', variables('storageAccountName'), variables('blobStorageUri')), 'null')]", "FhirServer__Operations__Export__Enabled": "[parameters('enableExport')]", "FhirServer__Operations__Export__StorageAccountUri": "[if(parameters('enableExport'), concat('https://', variables('storageAccountName'), variables('blobStorageUri')), 'null')]", "FhirServer__Operations__ConvertData__Enabled": "[parameters('enableConvertData')]", 
"FhirServer__Operations__ConvertData__ContainerRegistryServers__0": "[if(parameters('enableConvertData'), concat(variables('azureContainerRegistryName'), variables('azureContainerRegistryUri')), 'null')]", - "FhirServer__Operations__Reindex__Enabled": "[parameters('enableReindex')]" + "FhirServer__Operations__Reindex__Enabled": "[parameters('enableReindex')]", + "FhirServer__Operations__Import__Enabled": "[parameters('enableImport')]" }, "combinedFhirServerConfigProperties": "[union(variables('staticFhirServerConfigProperties'), parameters('additionalFhirServerConfigProperties'))]", "computedSqlServerReference": "[concat('Microsoft.Sql/servers/', variables('serviceName'))]", @@ -504,7 +523,7 @@ "properties": { "supportsHttpsTrafficOnly": true }, - "condition": "[parameters('enableExport')]", + "condition": "[variables('enableIntegrationStore')]", "dependsOn": [], "sku": { "name": "Standard_LRS" @@ -516,7 +535,7 @@ "type": "Microsoft.Storage/storageAccounts/providers/roleAssignments", "apiVersion": "2018-09-01-preview", "name": "[concat(variables('storageAccountName'), '/Microsoft.Authorization/', guid(uniqueString(variables('storageAccountName'), parameters('fhirVersion'), variables('serviceName'))))]", - "condition": "[parameters('enableExport')]", + "condition": "[variables('enableIntegrationStore')]", "dependsOn": [ "[variables('storageAccountName')]", "[variables('serviceName')]" diff --git a/samples/templates/default-azuredeploy.json b/samples/templates/default-azuredeploy.json index 41e2f693ec..29189fe298 100644 --- a/samples/templates/default-azuredeploy.json +++ b/samples/templates/default-azuredeploy.json @@ -198,6 +198,20 @@ "metadata": { "description": "Determines whether the $reindex operation will be enabled for this fhir instance." } + }, + "enableImport": { + "type": "bool", + "defaultValue": false, + "metadata": { + "description": "Determines whether the $import operation will be enabled for this fhir instance." 
+ } + }, + "backgroundTaskCount": { + "type": "int", + "defaultValue": 1, + "metadata": { + "description": "Supports parallel background task running" + } } }, "variables": { @@ -216,6 +230,7 @@ "storageBlobDataContributerRoleId": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Authorization/roleDefinitions/', 'ba92f5b4-2d11-453d-a403-e96b0029c9fe')]", "acrPullRoleId": "[concat('/subscriptions/', subscription().subscriptionId, '/providers/Microsoft.Authorization/roleDefinitions/', '7f951dda-4ed3-4680-a7ca-43fe172d538d')]", "blobStorageUri": "[if(variables('isMAG'), '.blob.core.usgovcloudapi.net', '.blob.core.windows.net')]", + "enableIntegrationStore": "[or(parameters('enableExport'), parameters('enableImport'))]", "staticFhirServerConfigProperties": { "APPINSIGHTS_PORTALINFO": "ASP.NETCORE", "APPINSIGHTS_PROFILERFEATURE_VERSION": "1.0.0", @@ -232,10 +247,14 @@ "SqlServer:SchemaOptions:AutomaticUpdatesEnabled": "[if(equals(parameters('sqlSchemaAutomaticUpdatesEnabled'),'auto'), true(), false())]", "DataStore": "[if(equals(parameters('solutionType'),'FhirServerCosmosDB'), 'CosmosDb', 'SqlServer')]", "FhirServer:Operations:Export:Enabled": "[parameters('enableExport')]", + "TaskHosting:Enabled": "[if(equals(parameters('solutionType'),'FhirServerCosmosDB'), false(), parameters('enableImport'))]", + "TaskHosting:MaxRunningTaskCount": "[parameters('backgroundTaskCount')]", + "FhirServer:Operations:IntegrationDataStore:StorageAccountUri": "[if(parameters('enableImport'), concat('https://', variables('storageAccountName'), variables('blobStorageUri')), 'null')]", "FhirServer:Operations:Export:StorageAccountUri": "[if(parameters('enableExport'), concat('https://', variables('storageAccountName'), variables('blobStorageUri')), 'null')]", "FhirServer:Operations:ConvertData:Enabled": "[parameters('enableConvertData')]", "FhirServer:Operations:ConvertData:ContainerRegistryServers:0": "[if(parameters('enableConvertData'), 
concat(variables('azureContainerRegistryName'), variables('azureContainerRegistryUri')), 'null')]", - "FhirServer:Operations:Reindex:Enabled": "[parameters('enableReindex')]" + "FhirServer:Operations:Reindex:Enabled": "[parameters('enableReindex')]", + "FhirServer:Operations:Import:Enabled": "[parameters('enableImport')]" }, "combinedFhirServerConfigProperties": "[union(variables('staticFhirServerConfigProperties'), parameters('additionalFhirServerConfigProperties'))]", "computedSqlServerReference": "[concat('Microsoft.Sql/servers/', variables('serviceName'))]", @@ -497,7 +516,7 @@ "properties": { "supportsHttpsTrafficOnly": true }, - "condition": "[parameters('enableExport')]", + "condition": "[variables('enableIntegrationStore')]", "dependsOn": [], "sku": { "name": "Standard_LRS" @@ -509,7 +528,7 @@ "type": "Microsoft.Storage/storageAccounts/providers/roleAssignments", "apiVersion": "2018-09-01-preview", "name": "[concat(variables('storageAccountName'), '/Microsoft.Authorization/', guid(uniqueString(variables('storageAccountName'), parameters('fhirVersion'), variables('serviceName'))))]", - "condition": "[parameters('enableExport')]", + "condition": "[variables('enableIntegrationStore')]", "dependsOn": [ "[variables('storageAccountName')]", "[variables('serviceName')]" diff --git a/src/Microsoft.Health.Fhir.Api/Features/ActionResults/ImportResult.cs b/src/Microsoft.Health.Fhir.Api/Features/ActionResults/ImportResult.cs new file mode 100644 index 0000000000..f3afa5d095 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Api/Features/ActionResults/ImportResult.cs @@ -0,0 +1,47 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Net; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; + +namespace Microsoft.Health.Fhir.Api.Features.ActionResults +{ + /// + /// Used to return the result of a bulk import operation. + /// + public class ImportResult : ResourceActionResult + { + public ImportResult(HttpStatusCode statusCode) + : base(null, statusCode) + { + } + + public ImportResult(ImportTaskResult jobResult, HttpStatusCode statusCode) + : base(jobResult, statusCode) + { + EnsureArg.IsNotNull(jobResult, nameof(jobResult)); + } + + /// + /// Creates an ImportResult with HttpStatusCode Accepted. + /// + public static ImportResult Accepted() + { + return new ImportResult(HttpStatusCode.Accepted); + } + + /// + /// Creates an ImportResult with HttpStatusCode Ok. + /// + /// The job payload that must be returned as part of the ImportResult. + public static ImportResult Ok(ImportTaskResult taskResult) + { + EnsureArg.IsNotNull(taskResult, nameof(taskResult)); + + return new ImportResult(taskResult, HttpStatusCode.OK); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskFactory.cs b/src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskFactory.cs new file mode 100644 index 0000000000..ff36fb5ec7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskFactory.cs @@ -0,0 +1,109 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using EnsureThat; +using Microsoft.Extensions.Logging; +using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Operations; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.TaskManagement; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Api.Features.BackgroundTaskService +{ + /// + /// Factory to create different tasks. + /// + public class TaskFactory : ITaskFactory + { + private readonly IImportResourceLoader _importResourceLoader; + private readonly IResourceBulkImporter _resourceBulkImporter; + private readonly IImportErrorStoreFactory _importErrorStoreFactory; + private readonly IImportOrchestratorTaskDataStoreOperation _importOrchestratorTaskDataStoreOperation; + private readonly ISequenceIdGenerator _sequenceIdGenerator; + private readonly IIntegrationDataStoreClient _integrationDataStoreClient; + private readonly ITaskManager _taskmanager; + private readonly IContextUpdaterFactory _contextUpdaterFactory; + private readonly RequestContextAccessor _contextAccessor; + private readonly ILoggerFactory _loggerFactory; + + public TaskFactory( + IImportResourceLoader importResourceLoader, + IResourceBulkImporter resourceBulkImporter, + IImportErrorStoreFactory importErrorStoreFactory, + IImportOrchestratorTaskDataStoreOperation importOrchestratorTaskDataStoreOperation, + IContextUpdaterFactory contextUpdaterFactory, + ITaskManager taskmanager, + ISequenceIdGenerator sequenceIdGenerator, + IIntegrationDataStoreClient integrationDataStoreClient, + RequestContextAccessor contextAccessor, + ILoggerFactory loggerFactory) + { + EnsureArg.IsNotNull(importResourceLoader, nameof(importResourceLoader)); + EnsureArg.IsNotNull(resourceBulkImporter, nameof(resourceBulkImporter)); + EnsureArg.IsNotNull(importErrorStoreFactory, 
nameof(importErrorStoreFactory)); + EnsureArg.IsNotNull(importOrchestratorTaskDataStoreOperation, nameof(importOrchestratorTaskDataStoreOperation)); + EnsureArg.IsNotNull(contextUpdaterFactory, nameof(contextUpdaterFactory)); + EnsureArg.IsNotNull(taskmanager, nameof(taskmanager)); + EnsureArg.IsNotNull(sequenceIdGenerator, nameof(sequenceIdGenerator)); + EnsureArg.IsNotNull(integrationDataStoreClient, nameof(integrationDataStoreClient)); + EnsureArg.IsNotNull(contextAccessor, nameof(contextAccessor)); + EnsureArg.IsNotNull(loggerFactory, nameof(loggerFactory)); + + _importResourceLoader = importResourceLoader; + _resourceBulkImporter = resourceBulkImporter; + _importErrorStoreFactory = importErrorStoreFactory; + _importOrchestratorTaskDataStoreOperation = importOrchestratorTaskDataStoreOperation; + _sequenceIdGenerator = sequenceIdGenerator; + _integrationDataStoreClient = integrationDataStoreClient; + _taskmanager = taskmanager; + _contextUpdaterFactory = contextUpdaterFactory; + _contextAccessor = contextAccessor; + _loggerFactory = loggerFactory; + } + + public ITask Create(TaskInfo taskInfo) + { + EnsureArg.IsNotNull(taskInfo, nameof(taskInfo)); + + if (taskInfo.TaskTypeId == ImportProcessingTask.ImportProcessingTaskId) + { + IContextUpdater contextUpdater = _contextUpdaterFactory.CreateContextUpdater(taskInfo.TaskId, taskInfo.RunId); + ImportProcessingTaskInputData inputData = JsonConvert.DeserializeObject(taskInfo.InputData); + ImportProcessingProgress importProgress = string.IsNullOrEmpty(taskInfo.Context) ? 
new ImportProcessingProgress() : JsonConvert.DeserializeObject(taskInfo.Context); + return new ImportProcessingTask( + inputData, + importProgress, + _importResourceLoader, + _resourceBulkImporter, + _importErrorStoreFactory, + contextUpdater, + _contextAccessor, + _loggerFactory); + } + + if (taskInfo.TaskTypeId == ImportOrchestratorTask.ImportOrchestratorTaskId) + { + IContextUpdater contextUpdater = _contextUpdaterFactory.CreateContextUpdater(taskInfo.TaskId, taskInfo.RunId); + ImportOrchestratorTaskInputData inputData = JsonConvert.DeserializeObject(taskInfo.InputData); + ImportOrchestratorTaskContext orchestratorTaskProgress = string.IsNullOrEmpty(taskInfo.Context) ? new ImportOrchestratorTaskContext() : JsonConvert.DeserializeObject(taskInfo.Context); + + return new ImportOrchestratorTask( + inputData, + orchestratorTaskProgress, + _taskmanager, + _sequenceIdGenerator, + contextUpdater, + _contextAccessor, + _importOrchestratorTaskDataStoreOperation, + _integrationDataStoreClient, + _loggerFactory); + } + + return null; + } + } +} diff --git a/src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskHostingBackgroundService.cs b/src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskHostingBackgroundService.cs new file mode 100644 index 0000000000..ad9ccd9e30 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Api/Features/BackgroundTaskService/TaskHostingBackgroundService.cs @@ -0,0 +1,73 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Extensions.DependencyInjection; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.TaskManagement; + +namespace Microsoft.Health.Fhir.Api.Features.BackgroundTaskService +{ + /// + /// The background service used to host the . + /// + public class TaskHostingBackgroundService : BackgroundService + { + private readonly Func> _taskHostingFactory; + private readonly TaskHostingConfiguration _taskHostingConfiguration; + private readonly ILogger _logger; + + public TaskHostingBackgroundService( + Func> taskHostingFactory, + IOptions taskHostingConfiguration, + ILogger logger) + { + EnsureArg.IsNotNull(taskHostingFactory, nameof(taskHostingFactory)); + EnsureArg.IsNotNull(taskHostingConfiguration?.Value, nameof(taskHostingConfiguration)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _taskHostingFactory = taskHostingFactory; + _taskHostingConfiguration = taskHostingConfiguration.Value; + _logger = logger; + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + _logger.LogInformation("TaskHostingBackgroundService begin."); + + try + { + using (IScoped taskHosting = _taskHostingFactory()) + { + var taskHostingValue = taskHosting.Value; + if (_taskHostingConfiguration != null) + { + taskHostingValue.PollingFrequencyInSeconds = _taskHostingConfiguration.PollingFrequencyInSeconds ?? taskHostingValue.PollingFrequencyInSeconds; + taskHostingValue.MaxRunningTaskCount = _taskHostingConfiguration.MaxRunningTaskCount ?? taskHostingValue.MaxRunningTaskCount; + taskHostingValue.TaskHeartbeatIntervalInSeconds = _taskHostingConfiguration.TaskHeartbeatIntervalInSeconds ?? 
taskHostingValue.TaskHeartbeatIntervalInSeconds; + taskHostingValue.TaskHeartbeatTimeoutThresholdInSeconds = _taskHostingConfiguration.TaskHeartbeatTimeoutThresholdInSeconds ?? taskHostingValue.TaskHeartbeatTimeoutThresholdInSeconds; + } + + using CancellationTokenSource cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(stoppingToken); + await taskHostingValue.StartAsync(cancellationTokenSource); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "TaskHostingBackgroundService crash."); + } + finally + { + _logger.LogInformation("TaskHostingBackgroundService end."); + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Api/Features/Filters/ValidateImportRequestFilterAttribute.cs b/src/Microsoft.Health.Fhir.Api/Features/Filters/ValidateImportRequestFilterAttribute.cs new file mode 100644 index 0000000000..9e572a4428 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Api/Features/Filters/ValidateImportRequestFilterAttribute.cs @@ -0,0 +1,51 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using EnsureThat; +using Microsoft.AspNetCore.Mvc.Filters; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Net.Http.Headers; + +namespace Microsoft.Health.Fhir.Api.Features.Filters +{ + /// + /// A filter that validates the headers present in the import request. + /// Short-circuits the pipeline if they are invalid. 
+ /// + [AttributeUsage(AttributeTargets.Method)] + internal sealed class ValidateImportRequestFilterAttribute : ActionFilterAttribute + { + private const string PreferHeaderName = "Prefer"; + private const string PreferHeaderExpectedValue = "respond-async"; + private const string ContentTypeHeaderExpectedValue = "application/fhir+json"; + + public ValidateImportRequestFilterAttribute() + { + } + + public override void OnActionExecuting(ActionExecutingContext context) + { + EnsureArg.IsNotNull(context, nameof(context)); + + if (!context.HttpContext.Request.Headers.TryGetValue(PreferHeaderName, out var preferHeaderValue) || + preferHeaderValue.Count != 1 || + !string.Equals(preferHeaderValue[0], PreferHeaderExpectedValue, StringComparison.OrdinalIgnoreCase)) + { + throw new RequestNotValidException(string.Format(Resources.UnsupportedHeaderValue, PreferHeaderName)); + } + + if (string.Equals(context.HttpContext.Request.Method, "POST", StringComparison.OrdinalIgnoreCase)) + { + if (!context.HttpContext.Request.Headers.TryGetValue(HeaderNames.ContentType, out var contentTypeHeaderValue) || + contentTypeHeaderValue.Count != 1 || + !contentTypeHeaderValue[0].Contains(ContentTypeHeaderExpectedValue, StringComparison.OrdinalIgnoreCase)) + { + throw new RequestNotValidException(string.Format(Resources.UnsupportedHeaderValue, HeaderNames.ContentType)); + } + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddleware.cs b/src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddleware.cs new file mode 100644 index 0000000000..57a2c67659 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddleware.cs @@ -0,0 +1,79 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; + +namespace Microsoft.Health.Fhir.Api.Features.Operations.Import +{ + public sealed class InitialImportLockMiddleware + { + private RequestDelegate _next; + private ImportTaskConfiguration _importTaskConfiguration; + private readonly HashSet<(string method, string pathRegex)> _excludedEndpoints; + + // hard-coding these to minimize resource consumption for locked message + private const string LockedContentType = "application/json; charset=utf-8"; + private static readonly ReadOnlyMemory _lockedBody = CreateLockedBody(Resources.LockedForInitialImportMode); + + public InitialImportLockMiddleware( + RequestDelegate next, + IOptions importTaskConfiguration) + { + _next = EnsureArg.IsNotNull(next, nameof(next)); + _importTaskConfiguration = EnsureArg.IsNotNull(importTaskConfiguration?.Value, nameof(importTaskConfiguration)); + + _excludedEndpoints = new HashSet<(string method, string pathRegex)>() + { + (HttpMethods.Get, ".*"), // Exclude all read operations + (HttpMethods.Post, "/\\$import"), + (HttpMethods.Delete, "/_operations/import/.+"), + }; + } + + public async Task Invoke(HttpContext context) + { + if (!_importTaskConfiguration.Enabled || !_importTaskConfiguration.InitialImportMode) + { + await _next(context); + return; + } + + if (IsExcludedEndpoint(context.Request.Method, context.Request.Path)) + { + await _next(context); + return; + } + + await Return423(context); + } + + private static async Task Return423(HttpContext context) + { + context.Response.StatusCode = StatusCodes.Status423Locked; + 
context.Response.ContentLength = _lockedBody.Length; + context.Response.ContentType = LockedContentType; + + await context.Response.Body.WriteAsync(_lockedBody); + } + + private bool IsExcludedEndpoint(string method, string path) + { + return _excludedEndpoints.Any(endpoint => + endpoint.method.Equals(method, StringComparison.OrdinalIgnoreCase) && + Regex.IsMatch(path, endpoint.pathRegex, RegexOptions.IgnoreCase)); + } + + private static Memory CreateLockedBody(string message) => Encoding.UTF8.GetBytes($@"{{""severity"":""Error"",""code"":""Locked"",""diagnostics"":""{message}""}}").AsMemory(); + } +} diff --git a/src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddlewareExtensions.cs b/src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddlewareExtensions.cs new file mode 100644 index 0000000000..ba4ca546dd --- /dev/null +++ b/src/Microsoft.Health.Fhir.Api/Features/Operations/Import/InitialImportLockMiddlewareExtensions.cs @@ -0,0 +1,20 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using EnsureThat; +using Microsoft.AspNetCore.Builder; + +namespace Microsoft.Health.Fhir.Api.Features.Operations.Import +{ + public static class InitialImportLockMiddlewareExtensions + { + public static IApplicationBuilder UseInitialImportLock(this IApplicationBuilder builder) + { + EnsureArg.IsNotNull(builder, nameof(builder)); + + return builder.UseMiddleware(); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Api/Features/Routing/KnownRoutes.cs b/src/Microsoft.Health.Fhir.Api/Features/Routing/KnownRoutes.cs index 48a18e0f11..eb005f1f4c 100644 --- a/src/Microsoft.Health.Fhir.Api/Features/Routing/KnownRoutes.cs +++ b/src/Microsoft.Health.Fhir.Api/Features/Routing/KnownRoutes.cs @@ -62,6 +62,10 @@ internal class KnownRoutes public const string PurgeHistoryResourceTypeById = ResourceTypeById + "/" + PurgeHistory; public const string PurgeHistoryOperationDefinition = OperationDefinition + "/" + OperationsConstants.PurgeHistory; + public const string Import = "$import"; + public const string ImportDataOperationDefinition = OperationDefinition + "/" + OperationsConstants.Import; + public const string ImportJobLocation = OperationsConstants.Operations + "/" + OperationsConstants.Import + "/" + IdRouteSegment; + public const string CompartmentTypeByResourceType = CompartmentTypeRouteSegment + "/" + IdRouteSegment + "/" + CompartmentResourceTypeRouteSegment; public const string Metadata = "metadata"; diff --git a/src/Microsoft.Health.Fhir.Api/Features/Routing/RouteNames.cs b/src/Microsoft.Health.Fhir.Api/Features/Routing/RouteNames.cs index 1173afba8f..a5c61d5c98 100644 --- a/src/Microsoft.Health.Fhir.Api/Features/Routing/RouteNames.cs +++ b/src/Microsoft.Health.Fhir.Api/Features/Routing/RouteNames.cs @@ -37,6 +37,10 @@ internal static class RouteNames internal const string GetReindexStatusById = "GetReindexStatusById"; + internal const string GetImportStatusById = 
"GetImportStatusById"; + + internal const string CancelImport = "CancelImport"; + internal const string PostBundle = "PostBundle"; internal const string PatientEverythingById = "PatientEverythingById"; diff --git a/src/Microsoft.Health.Fhir.Api/Features/Routing/UrlResolver.cs b/src/Microsoft.Health.Fhir.Api/Features/Routing/UrlResolver.cs index 707763b10e..7ddc506c50 100644 --- a/src/Microsoft.Health.Fhir.Api/Features/Routing/UrlResolver.cs +++ b/src/Microsoft.Health.Fhir.Api/Features/Routing/UrlResolver.cs @@ -250,6 +250,9 @@ public Uri ResolveOperationResultUrl(string operationName, string id) case OperationsConstants.Reindex: routeName = RouteNames.GetReindexStatusById; break; + case OperationsConstants.Import: + routeName = RouteNames.GetImportStatusById; + break; default: throw new OperationNotImplementedException(string.Format(Resources.OperationNotImplemented, operationName)); } diff --git a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs index 3958a707a7..a9cd1658d0 100644 --- a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs +++ b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs @@ -213,6 +213,33 @@ public static string GeneralTransactionFailedError { } } + /// + /// Looks up a localized string similar to Import request must be specified as a Paramters. The body provided in this request is not valid. . + /// + public static string ImportRequestNotValid { + get { + return ResourceManager.GetString("ImportRequestNotValid", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Value of the following parameter {0} is invalid.. + /// + public static string ImportRequestValueNotValid { + get { + return ResourceManager.GetString("ImportRequestValueNotValid", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Initial import mode is not enabled. Please update service configuration to enable initial import mode.. 
+ /// + public static string InitialImportModeNotEnabled { + get { + return ResourceManager.GetString("InitialImportModeNotEnabled", resourceCulture); + } + } + /// /// Looks up a localized string similar to The input data type '{0}' and default template collection '{1}' are inconsistent.. /// @@ -330,6 +357,15 @@ public static string InvalidTemplateCollectionReference { } } + /// + /// Looks up a localized string similar to Service is locked for initial import mode.. + /// + public static string LockedForInitialImportMode { + get { + return ResourceManager.GetString("LockedForInitialImportMode", resourceCulture); + } + } + /// /// Looks up a localized string similar to $member-match operation parameters must be specified as a FHIR Parameters resource. Provided body in this request is not valid.. /// @@ -402,6 +438,15 @@ public static string NotFoundException { } } + /// + /// Looks up a localized string similar to Only initial load import is supported. Please add "InitialLoad" mode to parameters.. + /// + public static string OnlyInitialImportOperationSupported { + get { + return ResourceManager.GetString("OnlyInitialImportOperationSupported", resourceCulture); + } + } + /// /// Looks up a localized string similar to {0} operation failed for reason: {1}. /// diff --git a/src/Microsoft.Health.Fhir.Api/Resources.resx b/src/Microsoft.Health.Fhir.Api/Resources.resx index 23354a78d9..a2381dc87c 100644 --- a/src/Microsoft.Health.Fhir.Api/Resources.resx +++ b/src/Microsoft.Health.Fhir.Api/Resources.resx @@ -313,6 +313,13 @@ Convert data operation parameters must be specified as a FHIR Parameters resource. The body provided in this request is not valid. + + Value of the following parameter {0} is invalid. + {0} = the configuration name which was invalid + + + Import request must be specified as a Paramters. The body provided in this request is not valid. + The template collection reference '{0}' is invalid. 
{0}: template collection reference @@ -357,4 +364,13 @@ $member-match operation parameters must be specified as a FHIR Parameters resource. Provided body in this request is not valid. + + Only initial load import is supported. Please add "InitialLoad" mode to parameters. + + + Initial import mode is not enabled. Please update service configuration to enable initial import mode. + + + Service is locked for initial import mode. + diff --git a/src/Microsoft.Health.Fhir.Azure.UnitTests/ContainerRegistry/AzureContainerRegistryAccessTokenProviderTests.cs b/src/Microsoft.Health.Fhir.Azure.UnitTests/ContainerRegistry/AzureContainerRegistryAccessTokenProviderTests.cs index 314fd4e873..f569f7b0df 100644 --- a/src/Microsoft.Health.Fhir.Azure.UnitTests/ContainerRegistry/AzureContainerRegistryAccessTokenProviderTests.cs +++ b/src/Microsoft.Health.Fhir.Azure.UnitTests/ContainerRegistry/AzureContainerRegistryAccessTokenProviderTests.cs @@ -12,8 +12,8 @@ using Microsoft.Health.Fhir.Azure.ContainerRegistry; using Microsoft.Health.Fhir.Azure.ExportDestinationClient; using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Operations.ConvertData.Models; -using Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient; using NSubstitute; using Xunit; diff --git a/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/AzureAccessTokenClientInitializerTests.cs b/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/AzureAccessTokenClientInitializerTests.cs index 6508d0b82b..c33767abc7 100644 --- a/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/AzureAccessTokenClientInitializerTests.cs +++ b/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/AzureAccessTokenClientInitializerTests.cs @@ -12,6 +12,7 @@ using Microsoft.Extensions.Options; using Microsoft.Health.Fhir.Azure.ExportDestinationClient; using 
Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient; using NSubstitute; using Xunit; diff --git a/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/CloudBlockBlobWrapperTests.cs b/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/CloudBlockBlobWrapperTests.cs index 8838168106..7c2257f425 100644 --- a/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/CloudBlockBlobWrapperTests.cs +++ b/src/Microsoft.Health.Fhir.Azure.UnitTests/ExportDestinationClient/CloudBlockBlobWrapperTests.cs @@ -4,8 +4,6 @@ // ------------------------------------------------------------------------------------------------- using System; -using System.Collections.Generic; -using System.Text; using Microsoft.Azure.Storage.Blob; using Microsoft.Health.Fhir.Azure.ExportDestinationClient; using Xunit; diff --git a/src/Microsoft.Health.Fhir.Azure.UnitTests/IntegrationDataStore/AzureBlobIntegrationDataStoreClientTests.cs b/src/Microsoft.Health.Fhir.Azure.UnitTests/IntegrationDataStore/AzureBlobIntegrationDataStoreClientTests.cs new file mode 100644 index 0000000000..71bb5f1df7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Azure.UnitTests/IntegrationDataStore/AzureBlobIntegrationDataStoreClientTests.cs @@ -0,0 +1,337 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Azure.Storage.Blob; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Azure.IntegrationDataStore; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; +using Xunit; + +namespace Microsoft.Health.Fhir.Azure.UnitTests.IntegrationDataStore +{ + public class AzureBlobIntegrationDataStoreClientTests + { + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenTextFileOnBlob_WhenDownloadContent_ThenContentShouldBeSame() + { + IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + var container = client.GetContainerReference(containerName); + await container.CreateAsync(); + try + { + using MemoryStream sourceStream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(sourceStream); + + int lineNumber = (1024 * 1024) + 3; + while (lineNumber-- > 0) + { + await writer.WriteLineAsync(Guid.NewGuid().ToString("N")); + } + + await writer.FlushAsync(); + + var blob = container.GetBlockBlobReference(blobName); + sourceStream.Position = 0; + blob.UploadFromStream(sourceStream); + + sourceStream.Position = 0; + AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + using Stream targetStream = blobClient.DownloadResource(blob.Uri, 0, CancellationToken.None); + using StreamReader sourceReader = new StreamReader(sourceStream); + using StreamReader 
targetReader = new StreamReader(targetStream); + + while (!sourceReader.EndOfStream) + { + Assert.Equal(sourceReader.ReadLine(), targetReader.ReadLine()); + } + } + finally + { + await container.DeleteIfExistsAsync(); + } + } + + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenTextFileOnBlob_WhenLoadFromMiddle_ThenContentShouldBeSame() + { + IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + var container = client.GetContainerReference(containerName); + await container.CreateAsync(); + try + { + using MemoryStream sourceStream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(sourceStream); + + int lineNumber = (1024 * 1024) + 3; + while (lineNumber-- > 0) + { + await writer.WriteLineAsync(Guid.NewGuid().ToString("N")); + } + + await writer.FlushAsync(); + + var blob = container.GetBlockBlobReference(blobName); + sourceStream.Position = 0; + blob.UploadFromStream(sourceStream); + + long startPosition = 2021; + sourceStream.Position = startPosition; + AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + using Stream targetStream = blobClient.DownloadResource(blob.Uri, startPosition, CancellationToken.None); + using StreamReader sourceReader = new StreamReader(sourceStream); + using StreamReader targetReader = new StreamReader(targetStream); + + while (!sourceReader.EndOfStream) + { + Assert.Equal(sourceReader.ReadLine(), targetReader.ReadLine()); + } + } + finally + { + await container.DeleteIfExistsAsync(); + } + } + + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenBlobUri_WhenCreateContainer_ThenContainerShouldBeCreated() + { + 
IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + Uri blobUri = new Uri(Path.Combine(client.StorageUri.PrimaryUri.ToString(), $"{containerName}/{blobName}")); + + try + { + AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + Uri fileUri = await blobClient.PrepareResourceAsync(containerName, blobName, CancellationToken.None); + Assert.True(await client.GetContainerReference(containerName).ExistsAsync()); + Assert.Equal(blobUri, fileUri); + + await blobClient.PrepareResourceAsync(containerName, blobName, CancellationToken.None); + } + finally + { + var container = client.GetContainerReference(containerName); + await container.DeleteIfExistsAsync(); + } + } + + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenABlob_WhenGetProperties_ThenProtertiesShouldBeReturned() + { + IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + Uri blobUri = new Uri(Path.Combine(client.StorageUri.PrimaryUri.ToString(), $"{containerName}/{blobName}")); + + var container = client.GetContainerReference(containerName); + await container.CreateAsync(); + try + { + using MemoryStream sourceStream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(sourceStream); + + await writer.WriteLineAsync(Guid.NewGuid().ToString("N")); + await writer.FlushAsync(); + + var blob = container.GetBlockBlobReference(blobName); + sourceStream.Position = 0; + blob.UploadFromStream(sourceStream); + + 
AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + Dictionary properties = await blobClient.GetPropertiesAsync(blobUri, CancellationToken.None); + Assert.True(properties.ContainsKey(IntegrationDataStoreClientConstants.BlobPropertyETag)); + Assert.True(properties.ContainsKey(IntegrationDataStoreClientConstants.BlobPropertyLength)); + } + finally + { + await container.DeleteIfExistsAsync(); + } + } + + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenDataStream_WhenUploadToBlob_ThenAllDataShouldBeUploaded() + { + IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + Uri blobUri = new Uri(Path.Combine(client.StorageUri.PrimaryUri.ToString(), $"{containerName}/{blobName}")); + + try + { + AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + await blobClient.PrepareResourceAsync(containerName, blobName, CancellationToken.None); + + long count = 30; + List blockIds = new List(); + for (long i = 0; i < count; ++i) + { + using Stream input = new MemoryStream(Encoding.UTF8.GetBytes(i.ToString() + "\r\n")); + string blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray()); + await blobClient.UploadBlockAsync(blobUri, input, blockId, CancellationToken.None); + blockIds.Add(blockId); + } + + await blobClient.CommitAsync(blobUri, blockIds.ToArray(), CancellationToken.None); + + ICloudBlob output = await client.GetBlobReferenceFromServerAsync(blobUri); + using Stream outputStream = new MemoryStream(); + await output.DownloadToStreamAsync(outputStream); + outputStream.Position = 0; + using 
StreamReader reader = new StreamReader(outputStream); + + long currentLine = 0; + string content = null; + + while ((content = await reader.ReadLineAsync()) != null) + { + Assert.Equal(currentLine.ToString(), content); + currentLine++; + } + + Assert.Equal(count, currentLine); + } + finally + { + var container = client.GetContainerReference(containerName); + await container.DeleteIfExistsAsync(); + } + } + + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenDataStream_WhenAppendToBlob_ThenDataShouldBeAppended() + { + IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + Uri blobUri = new Uri(Path.Combine(client.StorageUri.PrimaryUri.ToString(), $"{containerName}/{blobName}")); + + try + { + AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + await blobClient.PrepareResourceAsync(containerName, blobName, CancellationToken.None); + + long count = 30; + List blockIds = new List(); + for (long i = 0; i < count; ++i) + { + using Stream input = new MemoryStream(Encoding.UTF8.GetBytes(i.ToString() + "\r\n")); + string blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray()); + await blobClient.UploadBlockAsync(blobUri, input, blockId, CancellationToken.None); + blockIds.Add(blockId); + } + + await blobClient.CommitAsync(blobUri, blockIds.ToArray(), CancellationToken.None); + + ICloudBlob output = await client.GetBlobReferenceFromServerAsync(blobUri); + using Stream outputStream = new MemoryStream(); + await output.DownloadToStreamAsync(outputStream); + outputStream.Position = 0; + using StreamReader reader = new StreamReader(outputStream); + + long currentLine = 0; + string content = null; + + while ((content 
= await reader.ReadLineAsync()) != null) + { + Assert.Equal(currentLine.ToString(), content); + currentLine++; + } + + Assert.Equal(count, currentLine); + } + finally + { + var container = client.GetContainerReference(containerName); + await container.DeleteIfExistsAsync(); + } + } + + [Fact(Skip = "Local tests need emulator.")] + public async Task GivenStorageBlob_WhenAcquireLease_ThenLeaseIdShouldBeReturned() + { + IIntegrationDataStoreClientInitilizer initializer = GetClientInitializer(); + CloudBlobClient client = await initializer.GetAuthorizedClientAsync(CancellationToken.None); + + string containerName = Guid.NewGuid().ToString("N"); + string blobName = Guid.NewGuid().ToString("N"); + + Uri blobUri = new Uri(Path.Combine(client.StorageUri.PrimaryUri.ToString(), $"{containerName}/{blobName}")); + + try + { + AzureBlobIntegrationDataStoreClient blobClient = new AzureBlobIntegrationDataStoreClient(initializer, GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + await blobClient.PrepareResourceAsync(containerName, blobName, CancellationToken.None); + + long count = 30; + List blockIds = new List(); + for (long i = 0; i < count; ++i) + { + using Stream input = new MemoryStream(Encoding.UTF8.GetBytes(i.ToString() + "\r\n")); + string blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray()); + await blobClient.UploadBlockAsync(blobUri, input, blockId, CancellationToken.None); + blockIds.Add(blockId); + } + + await blobClient.CommitAsync(blobUri, blockIds.ToArray(), CancellationToken.None); + + string leaseId = await blobClient.TryAcquireLeaseAsync(blobUri, blobName, CancellationToken.None); + Assert.NotNull(leaseId); + string nullLeaseId = await blobClient.TryAcquireLeaseAsync(blobUri, "dummy", CancellationToken.None); + Assert.Null(nullLeaseId); + + await blobClient.TryReleaseLeaseAsync(blobUri, leaseId, CancellationToken.None); + } + finally + { + var container = client.GetContainerReference(containerName); + await 
container.DeleteIfExistsAsync(); + } + } + + private static IIntegrationDataStoreClientInitilizer GetClientInitializer() + { + return new AzureConnectionStringClientInitializerV2(GetIntegrationDataStoreConfigurationOption(), new NullLogger()); + } + + private static IOptions GetIntegrationDataStoreConfigurationOption() + { + return Options.Create(new IntegrationDataStoreConfiguration() + { + StorageAccountConnection = "UseDevelopmentStorage=true", + }); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Azure/ContainerRegistry/AzureContainerRegistryAccessTokenProvider.cs b/src/Microsoft.Health.Fhir.Azure/ContainerRegistry/AzureContainerRegistryAccessTokenProvider.cs index d17aec2e3b..993921de1f 100644 --- a/src/Microsoft.Health.Fhir.Azure/ContainerRegistry/AzureContainerRegistryAccessTokenProvider.cs +++ b/src/Microsoft.Health.Fhir.Azure/ContainerRegistry/AzureContainerRegistryAccessTokenProvider.cs @@ -13,9 +13,9 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Operations.ConvertData; using Microsoft.Health.Fhir.Core.Features.Operations.ConvertData.Models; -using Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient; using Newtonsoft.Json; using Polly; diff --git a/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenClientInitializer.cs b/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenClientInitializer.cs index a014c18d65..d304030ac9 100644 --- a/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenClientInitializer.cs +++ b/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenClientInitializer.cs @@ -13,6 +13,7 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; using 
Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient; namespace Microsoft.Health.Fhir.Azure.ExportDestinationClient diff --git a/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenProvider.cs b/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenProvider.cs index 7c0d95c5f1..ec6ba4775c 100644 --- a/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenProvider.cs +++ b/src/Microsoft.Health.Fhir.Azure/ExportDestinationClient/AzureAccessTokenProvider.cs @@ -9,7 +9,7 @@ using EnsureThat; using Microsoft.Azure.Services.AppAuthentication; using Microsoft.Extensions.Logging; -using Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient; +using Microsoft.Health.Fhir.Core.Features.Operations; namespace Microsoft.Health.Fhir.Azure.ExportDestinationClient { diff --git a/src/Microsoft.Health.Fhir.Azure/FhirServerBuilderAzureRegistrationExtensions.cs b/src/Microsoft.Health.Fhir.Azure/FhirServerBuilderAzureRegistrationExtensions.cs index fa2a94e470..c2283142cf 100644 --- a/src/Microsoft.Health.Fhir.Azure/FhirServerBuilderAzureRegistrationExtensions.cs +++ b/src/Microsoft.Health.Fhir.Azure/FhirServerBuilderAzureRegistrationExtensions.cs @@ -9,6 +9,7 @@ using Microsoft.Health.Extensions.DependencyInjection; using Microsoft.Health.Fhir.Azure.ContainerRegistry; using Microsoft.Health.Fhir.Azure.ExportDestinationClient; +using Microsoft.Health.Fhir.Azure.IntegrationDataStore; using Microsoft.Health.Fhir.Core.Configs; using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Operations.ConvertData; @@ -20,6 +21,7 @@ namespace Microsoft.Health.Fhir.Azure public static class FhirServerBuilderAzureRegistrationExtensions { private const string ExportConfigurationName = "FhirServer:Operations:Export"; + private const string IntegrationDataStoreConfigurationName = "FhirServer:Operations:IntegrationDataStore"; public static IFhirServerBuilder 
AddAzureExportDestinationClient(this IFhirServerBuilder fhirServerBuilder) { @@ -77,5 +79,41 @@ public static IFhirServerBuilder AddContainerRegistryTokenProvider(this IFhirSer return fhirServerBuilder; } + + /// + /// Customer can use this DataStore to integrate with other Azure services for data purpose. + /// + /// Service builder for FHIR server + /// Configuration for FHIR server + public static IFhirServerBuilder AddAzureIntegrationDataStoreClient(this IFhirServerBuilder fhirServerBuilder, IConfiguration configuration) + { + EnsureArg.IsNotNull(fhirServerBuilder, nameof(fhirServerBuilder)); + + var integrationDataStoreConfiguration = new IntegrationDataStoreConfiguration(); + configuration.GetSection(IntegrationDataStoreConfigurationName).Bind(integrationDataStoreConfiguration); + + if (!string.IsNullOrWhiteSpace(integrationDataStoreConfiguration.StorageAccountUri)) + { + fhirServerBuilder.Services.Add() + .Transient() + .AsService>(); + + fhirServerBuilder.Services.Add() + .Transient() + .AsService(); + } + else + { + fhirServerBuilder.Services.Add() + .Transient() + .AsService>(); + } + + fhirServerBuilder.Services.Add() + .Transient() + .AsImplementedInterfaces(); + + return fhirServerBuilder; + } } } diff --git a/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureAccessTokenClientInitializerV2.cs b/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureAccessTokenClientInitializerV2.cs new file mode 100644 index 0000000000..a83d41872c --- /dev/null +++ b/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureAccessTokenClientInitializerV2.cs @@ -0,0 +1,75 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Azure.Storage.Auth; +using Microsoft.Azure.Storage.Blob; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; + +namespace Microsoft.Health.Fhir.Azure.IntegrationDataStore +{ + public class AzureAccessTokenClientInitializerV2 : IIntegrationDataStoreClientInitilizer + { + private readonly IAccessTokenProvider _accessTokenProvider; + private readonly IntegrationDataStoreConfiguration _integrationDataStoreConfiguration; + private readonly ILogger _logger; + + public AzureAccessTokenClientInitializerV2( + IAccessTokenProvider accessTokenProvider, + IOptions integrationDataStoreConfiguration, + ILogger logger) + { + EnsureArg.IsNotNull(accessTokenProvider, nameof(accessTokenProvider)); + EnsureArg.IsNotNull(integrationDataStoreConfiguration?.Value, nameof(integrationDataStoreConfiguration)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _accessTokenProvider = accessTokenProvider; + _integrationDataStoreConfiguration = integrationDataStoreConfiguration.Value; + _logger = logger; + } + + public async Task GetAuthorizedClientAsync(CancellationToken cancellationToken) + { + return await GetAuthorizedClientAsync(_integrationDataStoreConfiguration, cancellationToken); + } + + public async Task GetAuthorizedClientAsync(IntegrationDataStoreConfiguration integrationDataStoreConfiguration, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(integrationDataStoreConfiguration.StorageAccountUri)) + { + throw new IntegrationDataStoreClientInitializerException(Resources.InvalidStorageUri, HttpStatusCode.BadRequest); + } + + if (!Uri.TryCreate(integrationDataStoreConfiguration.StorageAccountUri, UriKind.Absolute, out 
Uri storageAccountUri)) + { + throw new IntegrationDataStoreClientInitializerException(Resources.InvalidStorageUri, HttpStatusCode.BadRequest); + } + + string accessToken; + try + { + accessToken = await _accessTokenProvider.GetAccessTokenForResourceAsync(storageAccountUri, cancellationToken); + } + catch (AccessTokenProviderException atp) + { + _logger.LogError(atp, "Unable to get access token"); + + throw new IntegrationDataStoreClientInitializerException(Resources.CannotGetAccessToken, HttpStatusCode.Unauthorized); + } + +#pragma warning disable CA2000 // Dispose objects before losing scope + StorageCredentials storageCredentials = new StorageCredentials(new TokenCredential(accessToken)); +#pragma warning restore CA2000 // Dispose objects before losing scope + return new CloudBlobClient(storageAccountUri, storageCredentials); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobIntegrationDataStoreClient.cs b/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobIntegrationDataStoreClient.cs new file mode 100644 index 0000000000..918e5c0c71 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobIntegrationDataStoreClient.cs @@ -0,0 +1,251 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Azure.Storage; +using Microsoft.Azure.Storage.Blob; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Azure.ExportDestinationClient; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations; + +namespace Microsoft.Health.Fhir.Azure.IntegrationDataStore +{ + public class AzureBlobIntegrationDataStoreClient : IIntegrationDataStoreClient + { + private IIntegrationDataStoreClientInitilizer _integrationDataStoreClientInitializer; + private IntegrationStoreRetryExceptionPolicyFactory _integrationStoreRetryExceptionPolicyFactory; + private ILogger _logger; + + public AzureBlobIntegrationDataStoreClient( + IIntegrationDataStoreClientInitilizer integrationDataStoreClientInitializer, + IOptions integrationDataStoreConfiguration, + ILogger logger) + { + EnsureArg.IsNotNull(integrationDataStoreClientInitializer, nameof(integrationDataStoreClientInitializer)); + EnsureArg.IsNotNull(integrationDataStoreConfiguration?.Value, nameof(integrationDataStoreConfiguration)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _integrationDataStoreClientInitializer = integrationDataStoreClientInitializer; + _integrationStoreRetryExceptionPolicyFactory = new IntegrationStoreRetryExceptionPolicyFactory(integrationDataStoreConfiguration); + _logger = logger; + } + + public Stream DownloadResource(Uri resourceUri, long startOffset, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + + return new AzureBlobSourceStream(async () => await GetCloudBlobClientFromServerAsync(resourceUri, cancellationToken), startOffset, _logger); + } + + public async Task 
PrepareResourceAsync(string containerId, string fileName, CancellationToken cancellationToken) + { + EnsureArg.IsNotNullOrEmpty(containerId, nameof(containerId)); + EnsureArg.IsNotNullOrEmpty(fileName, nameof(fileName)); + + try + { + return await _integrationStoreRetryExceptionPolicyFactory + .RetryPolicy + .ExecuteAsync(async () => + { + CloudBlobClient cloudBlobClient = await _integrationDataStoreClientInitializer.GetAuthorizedClientAsync(cancellationToken); + CloudBlobContainer container = cloudBlobClient.GetContainerReference(containerId); + + await container.CreateIfNotExistsAsync(cancellationToken); + + CloudBlob blob = container.GetBlobReference(fileName); + + return blob.Uri; + }); + } + catch (StorageException storageEx) + { + _logger.LogInformation(storageEx, "Failed to create container for {0}:{1}", containerId, fileName); + + HttpStatusCode statusCode = StorageExceptionParser.ParseStorageException(storageEx); + throw new IntegrationDataStoreException(storageEx.Message, statusCode); + } + } + + public async Task UploadBlockAsync(Uri resourceUri, Stream stream, string blockId, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + EnsureArg.IsNotNull(stream, nameof(stream)); + EnsureArg.IsNotNullOrEmpty(blockId, nameof(blockId)); + + try + { + await _integrationStoreRetryExceptionPolicyFactory + .RetryPolicy + .ExecuteAsync(async () => + { + CloudBlockBlob blob = await GetCloudBlobClientAsync(resourceUri, cancellationToken); + await UploadBlockInternalAsync(blob, stream, blockId, cancellationToken); + }); + } + catch (StorageException storageEx) + { + _logger.LogInformation(storageEx, "Failed to upload data for {0}", resourceUri); + + HttpStatusCode statusCode = StorageExceptionParser.ParseStorageException(storageEx); + throw new IntegrationDataStoreException(storageEx.Message, statusCode); + } + } + + public async Task CommitAsync(Uri resourceUri, string[] blockIds, CancellationToken cancellationToken) + { + 
EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + EnsureArg.IsNotNull(blockIds, nameof(blockIds)); + + try + { + await _integrationStoreRetryExceptionPolicyFactory + .RetryPolicy + .ExecuteAsync(async () => + { + CloudBlockBlob blob = await GetCloudBlobClientAsync(resourceUri, cancellationToken); + await CommitInternalAsync(blob, blockIds, cancellationToken); + }); + } + catch (StorageException storageEx) + { + _logger.LogInformation(storageEx, "Failed to commit for {0}", resourceUri); + + HttpStatusCode statusCode = StorageExceptionParser.ParseStorageException(storageEx); + throw new IntegrationDataStoreException(storageEx.Message, statusCode); + } + } + + public async Task AppendCommitAsync(Uri resourceUri, string[] blockIds, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + EnsureArg.IsNotNull(blockIds, nameof(blockIds)); + + try + { + await _integrationStoreRetryExceptionPolicyFactory + .RetryPolicy + .ExecuteAsync(async () => + { + CloudBlockBlob blob = await GetCloudBlobClientAsync(resourceUri, cancellationToken); + await AppendCommitInternalAsync(blob, blockIds, cancellationToken); + }); + } + catch (StorageException storageEx) + { + _logger.LogInformation(storageEx, "Failed to append commit for {0}", resourceUri); + + HttpStatusCode statusCode = StorageExceptionParser.ParseStorageException(storageEx); + throw new IntegrationDataStoreException(storageEx.Message, statusCode); + } + } + + public async Task> GetPropertiesAsync(Uri resourceUri, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + + try + { + return await _integrationStoreRetryExceptionPolicyFactory + .RetryPolicy + .ExecuteAsync(async () => + { + CloudBlobClient cloudBlobClient = await _integrationDataStoreClientInitializer.GetAuthorizedClientAsync(cancellationToken); + ICloudBlob blob = await cloudBlobClient.GetBlobReferenceFromServerAsync(resourceUri); + + Dictionary result = new 
Dictionary(); + result[IntegrationDataStoreClientConstants.BlobPropertyETag] = blob.Properties.ETag; + result[IntegrationDataStoreClientConstants.BlobPropertyLength] = blob.Properties.Length; + + return result; + }); + } + catch (StorageException storageEx) + { + _logger.LogInformation(storageEx, "Failed to get properties of blob {0}", resourceUri); + + HttpStatusCode statusCode = StorageExceptionParser.ParseStorageException(storageEx); + throw new IntegrationDataStoreException(storageEx.Message, statusCode); + } + } + + public async Task TryAcquireLeaseAsync(Uri resourceUri, string proposedLeaseId, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + + try + { + CloudBlockBlob blob = await GetCloudBlobClientAsync(resourceUri, cancellationToken); + return await blob.AcquireLeaseAsync(null, proposedLeaseId, cancellationToken); + } + catch (StorageException storageEx) + { + _logger.LogInformation(storageEx, "Failed to acquire lease on the blob {0}", resourceUri); + return null; + } + } + + public async Task TryReleaseLeaseAsync(Uri resourceUri, string leaseId, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(resourceUri, nameof(resourceUri)); + + try + { + CloudBlockBlob blob = await GetCloudBlobClientAsync(resourceUri, cancellationToken); + await blob.ReleaseLeaseAsync(AccessCondition.GenerateLeaseCondition(leaseId), cancellationToken); + } + catch (Exception storageEx) + { + _logger.LogInformation(storageEx, "Failed to release lease on the blob {0}", resourceUri); + } + } + + private static async Task AppendCommitInternalAsync(CloudBlockBlob blob, string[] blockIds, CancellationToken cancellationToken) + { + IEnumerable blockList = await blob.DownloadBlockListAsync( + BlockListingFilter.Committed, + accessCondition: null, + options: null, + operationContext: null, + cancellationToken); + + List newBlockLists = blockList.Select(b => b.Name).ToList(); + newBlockLists.AddRange(blockIds); + + await 
CommitInternalAsync(blob, newBlockLists.ToArray(), cancellationToken); + } + + private static async Task UploadBlockInternalAsync(CloudBlockBlob blob, Stream stream, string blockId, CancellationToken cancellationToken) + { + await blob.PutBlockAsync(blockId, stream, contentMD5: null, cancellationToken); + } + + private static async Task CommitInternalAsync(CloudBlockBlob blob, string[] blockIds, CancellationToken cancellationToken) + { + await blob.PutBlockListAsync(blockIds, cancellationToken); + } + + private async Task GetCloudBlobClientAsync(Uri blobUri, CancellationToken cancellationToken) + { + CloudBlobClient cloudBlobClient = await _integrationDataStoreClientInitializer.GetAuthorizedClientAsync(cancellationToken); + return new CloudBlockBlob(blobUri, cloudBlobClient); + } + + private async Task GetCloudBlobClientFromServerAsync(Uri blobUri, CancellationToken cancellationToken) + { + CloudBlobClient cloudBlobClient = await _integrationDataStoreClientInitializer.GetAuthorizedClientAsync(cancellationToken); + return await cloudBlobClient.GetBlobReferenceFromServerAsync(blobUri, cancellationToken); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobSourceStream.cs b/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobSourceStream.cs new file mode 100644 index 0000000000..019c2166c7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Azure/IntegrationDataStore/AzureBlobSourceStream.cs @@ -0,0 +1,212 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using EnsureThat;
using Microsoft.Azure.Storage;
using Microsoft.Azure.Storage.Blob;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.Health.Fhir.Core.Configs;
using Microsoft.Health.Fhir.Core.Features.Operations;
using Microsoft.IO;
using Polly;

namespace Microsoft.Health.Fhir.Azure.IntegrationDataStore
{
    /// <summary>
    /// Read-only, forward-only <see cref="Stream"/> over an Azure block blob that prefetches
    /// fixed-size ranges concurrently to speed up sequential reads (used by import to stream
    /// ndjson source files).
    /// NOTE(review): generic type arguments in this chunk were stripped by extraction;
    /// they have been reconstructed below and should be confirmed against the repository.
    /// </summary>
    public class AzureBlobSourceStream : Stream
    {
        private const int DefaultConcurrentCount = 3;
        public const int DefaultBlockBufferSize = 8 * 1024 * 1024;

        private Func<Task<ICloudBlob>> _blobClientFactory;
        private long _startOffset;   // next blob offset to schedule for download
        private long _position;      // logical read position exposed via Position
        private ILogger<AzureBlobSourceStream> _logger;
        private RecyclableMemoryStreamManager _recyclableMemoryStreamManager;

        private ICloudBlob _blobClient;

        // In-flight range downloads, oldest first; Read consumes from the head.
        private readonly Queue<Task<Stream>> _downloadTasks = new Queue<Task<Stream>>();

        /// <param name="blobClientFactory">Factory producing an authorized blob client; re-invoked on retry to refresh credentials.</param>
        /// <param name="startOffset">Byte offset in the blob to start reading from; null means start of blob.</param>
        /// <param name="logger">Diagnostic logger.</param>
        public AzureBlobSourceStream(Func<Task<ICloudBlob>> blobClientFactory, long? startOffset, ILogger<AzureBlobSourceStream> logger)
        {
            EnsureArg.IsNotNull(blobClientFactory, nameof(blobClientFactory));
            EnsureArg.IsNotNull(logger, nameof(logger));

            _blobClientFactory = blobClientFactory;
            _startOffset = startOffset ?? 0;
            _position = _startOffset;
            _logger = logger;

            _recyclableMemoryStreamManager = new RecyclableMemoryStreamManager();
        }

        /// <summary>Maximum number of range downloads kept in flight.</summary>
        public int ConcurrentCount { get; set; } = DefaultConcurrentCount;

        /// <summary>Size in bytes of each downloaded range.</summary>
        public int BlockBufferSize { get; set; } = DefaultBlockBufferSize;

        public override bool CanRead => true;

        public override bool CanSeek => false;

        public override bool CanWrite => false;

        // Length is intentionally unsupported for this forward-only stream.
        public override long Length => throw new NotImplementedException();

        public override long Position
        {
            get => _position;
            set => throw new NotImplementedException();
        }

        /// <summary>
        /// Reads up to <paramref name="count"/> bytes, pulling data from prefetched range
        /// downloads and scheduling new ones as capacity frees up.
        /// </summary>
        public override int Read(byte[] buffer, int offset, int count)
        {
            int totalBytesRead = 0;
            InitializeBlobClient();

            while (count > 0)
            {
                TryStartNewDownloadTask();

                if (_downloadTasks.Count == 0)
                {
                    // Nothing left to download: end of blob reached.
                    break;
                }

                Task<Stream> downloadTask = _downloadTasks.Peek();

                // Stream.Read is synchronous by contract, so block on the prefetch task here.
                downloadTask.Wait();

                Stream contentStream = downloadTask.Result;
                int bytesRead = contentStream.Read(buffer, offset, count);
                if (bytesRead == 0)
                {
                    // Current range exhausted; discard it and move on to the next range.
                    contentStream.Dispose();
                    _downloadTasks.Dequeue().Dispose();
                    continue;
                }

                totalBytesRead += bytesRead;
                offset += bytesRead;
                count -= bytesRead;
            }

            // BUG FIX: the original did `_position += offset;`, but `offset` has already been
            // advanced past the caller's initial offset, over-counting the position by that
            // initial value whenever offset != 0. Only bytes actually read advance the position.
            _position += totalBytesRead;
            return totalBytesRead;
        }

        public override void Flush() => throw new NotImplementedException();

        public override long Seek(long offset, SeekOrigin origin) => throw new NotImplementedException();

        public override void SetLength(long value) => throw new NotImplementedException();

        public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();

        /// <summary>Schedules range downloads until ConcurrentCount are in flight or the blob is exhausted.</summary>
        private int TryStartNewDownloadTask()
        {
            int newTasksStarted = 0;
            while (_downloadTasks.Count < ConcurrentCount)
            {
                (long position, long? length) nextRange = NextRange();

                // the range is empty => all data downloaded.
                if ((nextRange.length ?? 0) == 0)
                {
                    break;
                }

                _downloadTasks.Enqueue(DownloadBlobAsync(nextRange.position, nextRange.length ?? 0));
                newTasksStarted++;
            }

            return newTasksStarted;
        }

        private async Task<Stream> DownloadBlobAsync(long offset, long length)
        {
            // Retry transient failures; the blob client is refreshed between attempts in case
            // the underlying credential/token expired mid-download.
            return await Policy.Handle<Exception>()
                .WaitAndRetryAsync(
                    retryCount: 3,
                    sleepDurationProvider: (retryCount) => TimeSpan.FromSeconds(5 * (retryCount - 1)),
                    onRetryAsync: async (exception, retryCount) =>
                    {
                        _logger.LogWarning(exception, "Error while download blobs.");

                        await RefreshBlobClientAsync();
                    })
                .ExecuteAsync(() => DownloadDataFunc(offset, length));
        }

        /// <summary>Computes the next [offset, length] range to download; length is null when done.</summary>
        private (long offset, long? length) NextRange()
        {
            long totalLength = _blobClient.Properties.Length;
            long? length = null;
            long nextPosition = _startOffset;
            if (totalLength > _startOffset)
            {
                length = Math.Min(totalLength - _startOffset, BlockBufferSize);
            }

            _startOffset += length ?? 0;
            return (nextPosition, length);
        }

        private void InitializeBlobClient()
        {
            if (_blobClient == null)
            {
                // Read() is synchronous, so block once on first use to materialize the client.
                _blobClient = _blobClientFactory().Result;
            }
        }

        private async Task RefreshBlobClientAsync()
        {
            _blobClient = await _blobClientFactory();
        }

        private async Task<Stream> DownloadDataFunc(long offset, long length)
        {
            // Pooled memory stream avoids LOH churn for the 8 MB range buffers.
            var stream = new RecyclableMemoryStream(_recyclableMemoryStreamManager);
            await _blobClient.DownloadRangeToStreamAsync(stream, offset, length);
            stream.Position = 0;

            return stream;
        }
    }

    /// <summary>
    /// Creates <see cref="CloudBlobClient"/> instances for the integration data store from the
    /// configured storage-account connection string.
    /// </summary>
    public class AzureConnectionStringClientInitializerV2 : IIntegrationDataStoreClientInitilizer<CloudBlobClient>
    {
        private readonly IntegrationDataStoreConfiguration _integrationDataStoreConfiguration;
        private readonly ILogger<AzureConnectionStringClientInitializerV2> _logger;

        public AzureConnectionStringClientInitializerV2(IOptions<IntegrationDataStoreConfiguration> integrationDataStoreConfiguration, ILogger<AzureConnectionStringClientInitializerV2> logger)
        {
            EnsureArg.IsNotNull(integrationDataStoreConfiguration?.Value, nameof(integrationDataStoreConfiguration));
            EnsureArg.IsNotNull(logger, nameof(logger));

            _integrationDataStoreConfiguration = integrationDataStoreConfiguration.Value;
            _logger = logger;
        }

        /// <summary>Returns a blob client built from the configuration supplied at construction.</summary>
        public Task<CloudBlobClient> GetAuthorizedClientAsync(CancellationToken cancellationToken)
        {
            return GetAuthorizedClientAsync(_integrationDataStoreConfiguration, cancellationToken);
        }

        /// <summary>
        /// Builds a blob client from an explicit configuration.
        /// </summary>
        /// <exception cref="IntegrationDataStoreClientInitializerException">
        /// Thrown (with BadRequest) when the connection string is missing, unparsable, or client creation fails.
        /// </exception>
        public Task<CloudBlobClient> GetAuthorizedClientAsync(IntegrationDataStoreConfiguration integrationDataStoreConfiguration, CancellationToken cancellationToken)
        {
            if (string.IsNullOrWhiteSpace(integrationDataStoreConfiguration.StorageAccountConnection))
            {
                throw new IntegrationDataStoreClientInitializerException(Resources.InvalidConnectionSettings, HttpStatusCode.BadRequest);
            }

            if (!CloudStorageAccount.TryParse(integrationDataStoreConfiguration.StorageAccountConnection, out CloudStorageAccount cloudAccount))
            {
                throw new IntegrationDataStoreClientInitializerException(Resources.InvalidConnectionSettings, HttpStatusCode.BadRequest);
            }

            CloudBlobClient blobClient;
            try
            {
                blobClient = cloudAccount.CreateCloudBlobClient();
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Failed to create a Cloud Blob Client");

                throw new IntegrationDataStoreClientInitializerException(Resources.InvalidConnectionSettings, HttpStatusCode.BadRequest);
            }

            return Task.FromResult(blobClient);
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System;
using System.Runtime.ExceptionServices;
using System.Threading.Tasks;
using EnsureThat;
using Microsoft.Azure.Storage;
using Microsoft.Extensions.Options;
using Microsoft.Health.Fhir.Core.Configs;
using Polly;
using Polly.Retry;

namespace Microsoft.Health.Fhir.Azure.IntegrationDataStore
{
    /// <summary>
    /// Builds the shared Polly retry policy for integration data store (blob) operations:
    /// fixed-interval retries bounded by an overall wall-clock retry window.
    /// </summary>
    public class IntegrationStoreRetryExceptionPolicyFactory
    {
        // Context key holding the UTC time after which no further retries are attempted.
        private const string RetryEndTimeContextKey = "RetryEndTime";

        private AsyncRetryPolicy _retryPolicy;

        public IntegrationStoreRetryExceptionPolicyFactory(IOptions<IntegrationDataStoreConfiguration> integrationDataStoreConfiguration)
        {
            EnsureArg.IsNotNull(integrationDataStoreConfiguration?.Value, nameof(integrationDataStoreConfiguration));

            _retryPolicy = CreateExtendedRetryPolicy(integrationDataStoreConfiguration.Value);
        }

        /// <summary>The configured retry policy; one instance is shared by all callers.</summary>
        public AsyncRetryPolicy RetryPolicy => _retryPolicy;

        private static AsyncRetryPolicy CreateExtendedRetryPolicy(IntegrationDataStoreConfiguration integrationDataStoreConfiguration)
        {
            // NOTE(review): the handled exception type argument was stripped from this chunk;
            // StorageException matches the Microsoft.Azure.Storage using — confirm against the repo.
            return Policy.Handle<StorageException>()
                .WaitAndRetryAsync(
                    retryCount: integrationDataStoreConfiguration.MaxRetryCount,
                    // 'RetryInternalInSecondes' [sic] is the configuration property's actual name.
                    sleepDurationProvider: (_) => TimeSpan.FromSeconds(integrationDataStoreConfiguration.RetryInternalInSecondes),
                    onRetryAsync: (e, _, _, ctx) =>
                    {
                        if (integrationDataStoreConfiguration.MaxWaitTimeInSeconds == -1)
                        {
                            // no timeout
                            return Task.CompletedTask;
                        }

                        if (ctx.TryGetValue(RetryEndTimeContextKey, out var endTimeObj))
                        {
                            if (DateTime.UtcNow >= (DateTime)endTimeObj)
                            {
                                // Retry window exhausted: rethrow preserving the original stack trace.
                                ExceptionDispatchInfo.Throw(e);
                            }

                            // otherwise, we have enough time to retry
                        }
                        else
                        {
                            // First retry: record when the overall retry window ends.
                            ctx.Add(RetryEndTimeContextKey, DateTime.UtcNow.AddSeconds(integrationDataStoreConfiguration.MaxWaitTimeInSeconds));
                        }

                        return Task.CompletedTask;
                    });
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using MediatR;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Health.Extensions.DependencyInjection;
using Microsoft.Health.Fhir.Core.Exceptions;
using Microsoft.Health.Fhir.Core.Extensions;
using Microsoft.Health.Fhir.Core.Features.Operations;
using Microsoft.Health.Fhir.Core.Features.Operations.Import;
using Microsoft.Health.Fhir.Core.Features.Security.Authorization;
using Microsoft.Health.Fhir.Core.Messages.Import;
using Microsoft.Health.TaskManagement;
using NSubstitute;
using Xunit;
using TaskStatus = Microsoft.Health.TaskManagement.TaskStatus;

namespace Microsoft.Health.Fhir.Core.UnitTests.Features.Operations.BulkImport
{
    /// <summary>
    /// Unit tests for <see cref="CancelImportRequestHandler"/> exercising cancellation of
    /// completed, already-canceled, in-flight, and nonexistent import tasks.
    /// NOTE(review): generic type arguments were stripped from this chunk by extraction and
    /// have been reconstructed; confirm against the repository. The unused private field
    /// `_sleepDurationProvider` from the original has been removed.
    /// </summary>
    public class CancelImportRequestHandlerTests
    {
        private const string TaskId = "taskId";

        private readonly ITaskManager _taskManager = Substitute.For<ITaskManager>();
        private readonly IMediator _mediator;

        private readonly CancellationToken _cancellationToken = new CancellationTokenSource().Token;

        public CancelImportRequestHandlerTests()
        {
            // Wire the real handler into a lightweight mediator so tests go through MediatR dispatch.
            var collection = new ServiceCollection();
            collection
                .Add(sp => new CancelImportRequestHandler(
                    _taskManager,
                    DisabledFhirAuthorizationService.Instance,
                    NullLogger<CancelImportRequestHandler>.Instance))
                .Singleton()
                .AsSelf()
                .AsImplementedInterfaces();

            ServiceProvider provider = collection.BuildServiceProvider();
            _mediator = new Mediator(type => provider.GetService(type));
        }

        [Theory]
        [InlineData(TaskStatus.Completed)]
        public async Task GivenAFhirMediator_WhenCancelingExistingBulkImportTaskThatHasAlreadyCompleted_ThenConflictStatusCodeShouldBeReturned(TaskStatus taskStatus)
        {
            OperationFailedException operationFailedException = await Assert.ThrowsAsync<OperationFailedException>(async () => await SetupAndExecuteCancelImportAsync(taskStatus, HttpStatusCode.Conflict));

            Assert.Equal(HttpStatusCode.Conflict, operationFailedException.ResponseStatusCode);
        }

        [Fact]
        public async Task GivenAFhirMediator_WhenCancelingExistingBulkImportTaskThatHasAlreadyCanceled_ThenAcceptedCodeShouldBeReturned()
        {
            await SetupAndExecuteCancelImportAsync(TaskStatus.Queued, HttpStatusCode.Accepted, true);
        }

        [Theory]
        [InlineData(TaskStatus.Queued)]
        [InlineData(TaskStatus.Running)]
        public async Task GivenAFhirMediator_WhenCancelingExistingBulkImportTaskThatHasNotCompleted_ThenAcceptedStatusCodeShouldBeReturned(TaskStatus taskStatus)
        {
            TaskInfo taskInfo = await SetupAndExecuteCancelImportAsync(taskStatus, HttpStatusCode.Accepted);

            await _taskManager.Received(1).CancelTaskAsync(taskInfo.TaskId, _cancellationToken);
        }

        [Fact]
        public async Task GivenAFhirMediator_WhenCancelingWithNotExistTask_ThenNotFoundShouldBeReturned()
        {
            _taskManager.CancelTaskAsync(Arg.Any<string>(), _cancellationToken)
                .Returns<Task<TaskInfo>>(_ => throw new TaskNotExistException("Task not exist."));

            await Assert.ThrowsAsync<ResourceNotFoundException>(async () => await _mediator.CancelImportAsync(TaskId, _cancellationToken));
        }

        /// <summary>Seeds a task with the given status, issues the cancel request, and asserts the response code.</summary>
        private async Task<TaskInfo> SetupAndExecuteCancelImportAsync(TaskStatus taskStatus, HttpStatusCode expectedStatusCode, bool isCanceled = false)
        {
            TaskInfo taskInfo = SetupBulkImportTask(taskStatus, isCanceled);

            CancelImportResponse response = await _mediator.CancelImportAsync(TaskId, _cancellationToken);

            Assert.NotNull(response);
            Assert.Equal(expectedStatusCode, response.StatusCode);

            return taskInfo;
        }

        /// <summary>Registers a fake import task with the substituted task manager.</summary>
        private TaskInfo SetupBulkImportTask(TaskStatus taskStatus, bool isCanceled)
        {
            var taskInfo = new TaskInfo
            {
                TaskId = TaskId,
                QueueId = "0",
                Status = taskStatus,
                TaskTypeId = ImportProcessingTask.ImportProcessingTaskId,
                InputData = string.Empty,
                IsCanceled = isCanceled,
            };

            _taskManager.GetTaskAsync(TaskId, _cancellationToken).Returns(taskInfo);
            _taskManager.CancelTaskAsync(TaskId, _cancellationToken).Returns(taskInfo);

            return taskInfo;
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using MediatR;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Health.Extensions.DependencyInjection;
using Microsoft.Health.Fhir.Core.Exceptions;
using Microsoft.Health.Fhir.Core.Extensions;
using Microsoft.Health.Fhir.Core.Features.Operations;
using Microsoft.Health.Fhir.Core.Features.Operations.Import;
using Microsoft.Health.Fhir.Core.Features.Security.Authorization;
using Microsoft.Health.Fhir.Core.Messages.Import;
using Microsoft.Health.TaskManagement;
using Newtonsoft.Json;
using NSubstitute;
using Xunit;
using TaskStatus = Microsoft.Health.TaskManagement.TaskStatus;

namespace Microsoft.Health.Fhir.Core.UnitTests.Features.Operations.BulkImport
{
    /// <summary>
    /// Unit tests for <see cref="GetImportRequestHandler"/> covering completed, failed,
    /// canceled, in-progress, and missing import tasks.
    /// NOTE(review): generic type arguments were stripped from this chunk by extraction
    /// and have been reconstructed; confirm against the repository.
    /// </summary>
    public class GetImportRequestHandlerTests
    {
        private const string TaskId = "taskId";
        private readonly ITaskManager _taskManager = Substitute.For<ITaskManager>();
        private readonly IMediator _mediator;
        private readonly Uri _createRequestUri = new Uri("https://localhost/$import/");
        private HttpStatusCode _failureStatusCode = HttpStatusCode.BadRequest;

        public GetImportRequestHandlerTests()
        {
            // Wire the real handler into a lightweight mediator so tests go through MediatR dispatch.
            var collection = new ServiceCollection();
            collection.Add(x => new GetImportRequestHandler(_taskManager, DisabledFhirAuthorizationService.Instance)).Singleton().AsSelf().AsImplementedInterfaces();

            ServiceProvider provider = collection.BuildServiceProvider();
            _mediator = new Mediator(type => provider.GetService(type));
        }

        [Fact]
        public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportTaskWithCompletedStatus_ThenHttpResponseCodeShouldBeOk()
        {
            ImportTaskResult expectedResult = new ImportTaskResult();
            expectedResult.Request = "test";

            TaskResultData taskResultData = new TaskResultData()
            {
                Result = TaskResult.Success,
                ResultData = JsonConvert.SerializeObject(expectedResult),
            };

            GetImportResponse result = await SetupAndExecuteGetBulkImportTaskByIdAsync(TaskStatus.Completed, false, taskResultData);

            Assert.Equal(HttpStatusCode.OK, result.StatusCode);
            Assert.NotNull(result.TaskResult);
        }

        [Fact]
        public async Task GivenAFhirMediator_WhenGettingAnCompletedImportTaskWithFailure_ThenHttpResponseCodeShouldBeExpected()
        {
            ImportTaskErrorResult expectedResult = new ImportTaskErrorResult();
            expectedResult.HttpStatusCode = HttpStatusCode.BadRequest;

            TaskResultData taskResultData = new TaskResultData()
            {
                Result = TaskResult.Fail,
                ResultData = JsonConvert.SerializeObject(expectedResult),
            };

            OperationFailedException ofe = await Assert.ThrowsAsync<OperationFailedException>(() => SetupAndExecuteGetBulkImportTaskByIdAsync(TaskStatus.Completed, false, taskResultData));

            Assert.Equal(HttpStatusCode.BadRequest, ofe.ResponseStatusCode);
            Assert.NotNull(ofe.Message);
        }

        [Fact]
        public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportTaskThatWasCanceled_ThenOperationFailedExceptionIsThrownWithBadRequestHttpResponseCode()
        {
            OperationFailedException ofe = await Assert.ThrowsAsync<OperationFailedException>(() => SetupAndExecuteGetBulkImportTaskByIdAsync(TaskStatus.Queued, true));

            Assert.NotNull(ofe);
            Assert.Equal(_failureStatusCode, ofe.ResponseStatusCode);
        }

        [Theory]
        [InlineData(TaskStatus.Running)]
        [InlineData(TaskStatus.Queued)]
        public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportTaskWithNotCompletedStatus_ThenHttpResponseCodeShouldBeAccepted(TaskStatus taskStatus)
        {
            GetImportResponse result = await SetupAndExecuteGetBulkImportTaskByIdAsync(taskStatus);

            Assert.Equal(HttpStatusCode.Accepted, result.StatusCode);
            Assert.Null(result.TaskResult);
        }

        [Fact]
        public async Task GivenAFhirMediator_WhenGettingWithNotExistTask_ThenNotFoundShouldBeReturned()
        {
            _taskManager.GetTaskAsync(Arg.Any<string>(), Arg.Any<CancellationToken>()).Returns(Task.FromResult<TaskInfo>(null));

            await Assert.ThrowsAsync<ResourceNotFoundException>(async () => await _mediator.GetImportStatusAsync(TaskId, CancellationToken.None));
        }

        /// <summary>Seeds a fake task with the given status/result and issues the status request.</summary>
        private async Task<GetImportResponse> SetupAndExecuteGetBulkImportTaskByIdAsync(TaskStatus taskStatus, bool isCanceled = false, TaskResultData resultData = null)
        {
            // Result may be changed to real style result later
            var taskInfo = new TaskInfo
            {
                TaskId = TaskId,
                QueueId = "0",
                Status = taskStatus,
                TaskTypeId = ImportProcessingTask.ImportProcessingTaskId,
                InputData = string.Empty,
                IsCanceled = isCanceled,
                Result = resultData != null ? JsonConvert.SerializeObject(resultData) : string.Empty,
            };

            _taskManager.GetTaskAsync(taskInfo.TaskId, Arg.Any<CancellationToken>()).Returns(taskInfo);

            return await _mediator.GetImportStatusAsync(taskInfo.TaskId, CancellationToken.None);
        }
    }
}
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Health.Core; +using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Operations; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; +using Microsoft.Health.TaskManagement; +using Newtonsoft.Json; +using NSubstitute; +using Xunit; + +namespace Microsoft.Health.Fhir.Core.UnitTests.Features.Operations.Import +{ + public class ImportOrchestratorTaskTests + { + [Fact] + public async Task GivenAnOrchestratorTask_WhenProcessingInputFilesMoreThanConcurrentCount_ThenTaskShouldBeCompleted() + { + await VerifyCommonOrchestratorTaskAsync(105, 6); + } + + [Fact] + public async Task GivenAnOrchestratorTask_WhenProcessingInputFilesEqualsConcurrentCount_ThenTaskShouldBeCompleted() + { + await VerifyCommonOrchestratorTaskAsync(105, 105); + } + + [Fact] + public async Task GivenAnOrchestratorTask_WhenProcessingInputFilesLessThanConcurrentCount_ThenTaskShouldBeCompleted() + { + await VerifyCommonOrchestratorTaskAsync(11, 105); + } + + [Fact] + public async Task GivenAnOrchestratorTask_WhenResumeFromFailure_ThenTaskShouldBeCompleted() + { + await VerifyCommonOrchestratorTaskAsync(105, 6, 10); + } + + [Fact] + public async Task GivenAnOrchestratorTaskAndWrongEtag_WhenOrchestratorTaskStart_ThenTaskShouldFailedWithDetails() + { + IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For(); + IContextUpdater contextUpdater = Substitute.For(); + RequestContextAccessor contextAccessor = Substitute.For>(); + ILoggerFactory loggerFactory = new 
NullLoggerFactory(); + IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); + ISequenceIdGenerator sequenceIdGenerator = Substitute.For>(); + ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData(); + ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext(); + ITaskManager taskManager = Substitute.For(); + + importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N"); + importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow; + importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy"); + var inputs = new List(); + inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy"), Etag = "dummy" }); + importOrchestratorTaskInputData.Input = inputs; + importOrchestratorTaskInputData.InputFormat = "ndjson"; + importOrchestratorTaskInputData.InputSource = new Uri("http://dummy"); + importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1; + importOrchestratorTaskInputData.ProcessingTaskQueueId = "default"; + importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy"); + + integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + Dictionary properties = new Dictionary(); + properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; + properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; + return properties; + }); + + sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L); + + ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask( + importOrchestratorTaskInputData, + importOrchestratorTaskContext, + taskManager, + sequenceIdGenerator, + contextUpdater, + contextAccessor, + fhirDataBulkImportOperation, + integrationDataStoreClient, + loggerFactory); + + TaskResultData result = await orchestratorTask.ExecuteAsync(); + ImportTaskErrorResult resultDetails = JsonConvert.DeserializeObject(result.ResultData); + + 
Assert.Equal(TaskResult.Fail, result.Result); + Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); + Assert.NotEmpty(resultDetails.ErrorMessage); + } + + [Fact] + public async Task GivenAnOrchestratorTask_WhenIntegrationExceptionThrow_ThenTaskShouldFailedWithDetails() + { + IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For(); + IContextUpdater contextUpdater = Substitute.For(); + RequestContextAccessor contextAccessor = Substitute.For>(); + ILoggerFactory loggerFactory = new NullLoggerFactory(); + IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); + ISequenceIdGenerator sequenceIdGenerator = Substitute.For>(); + ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData(); + ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext(); + ITaskManager taskManager = Substitute.For(); + + importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N"); + importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow; + importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy"); + var inputs = new List(); + inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy"), Etag = "dummy" }); + importOrchestratorTaskInputData.Input = inputs; + importOrchestratorTaskInputData.InputFormat = "ndjson"; + importOrchestratorTaskInputData.InputSource = new Uri("http://dummy"); + importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1; + importOrchestratorTaskInputData.ProcessingTaskQueueId = "default"; + importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy"); + + integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) + .Returns>>(_ => + { + throw new IntegrationDataStoreException("dummy", HttpStatusCode.Unauthorized); + }); + + sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L); + + ImportOrchestratorTask orchestratorTask = new 
ImportOrchestratorTask( + importOrchestratorTaskInputData, + importOrchestratorTaskContext, + taskManager, + sequenceIdGenerator, + contextUpdater, + contextAccessor, + fhirDataBulkImportOperation, + integrationDataStoreClient, + loggerFactory); + + TaskResultData result = await orchestratorTask.ExecuteAsync(); + ImportTaskErrorResult resultDetails = JsonConvert.DeserializeObject(result.ResultData); + + Assert.Equal(TaskResult.Fail, result.Result); + Assert.Equal(HttpStatusCode.Unauthorized, resultDetails.HttpStatusCode); + Assert.NotEmpty(resultDetails.ErrorMessage); + } + + [Fact] + public async Task GivenAnOrchestratorTask_WhenFailedAtPreprocessStep_ThenRetrableExceptionShouldBeThrowAndContextUpdated() + { + IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For(); + IContextUpdater contextUpdater = Substitute.For(); + RequestContextAccessor contextAccessor = Substitute.For>(); + ILoggerFactory loggerFactory = new NullLoggerFactory(); + IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); + ISequenceIdGenerator sequenceIdGenerator = Substitute.For>(); + ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData(); + ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext(); + List<(long begin, long end)> surrogatedIdRanges = new List<(long begin, long end)>(); + TestTaskManager taskManager = new TestTaskManager(t => + { + if (t == null) + { + return null; + } + + t.Status = TaskManagement.TaskStatus.Running; + return t; + }); + + importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N"); + importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow; + importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy"); + var inputs = new List(); + inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") }); + + importOrchestratorTaskInputData.Input = inputs; + 
importOrchestratorTaskInputData.InputFormat = "ndjson"; + importOrchestratorTaskInputData.InputSource = new Uri("http://dummy"); + importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1; + importOrchestratorTaskInputData.ProcessingTaskQueueId = "default"; + importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy"); + + integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + Dictionary properties = new Dictionary(); + properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; + properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; + return properties; + }); + + fhirDataBulkImportOperation.PreprocessAsync(Arg.Any()) + .Returns(_ => + { + throw new InvalidCastException(); + }); + + string latestContext = null; + contextUpdater.UpdateContextAsync(Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + latestContext = (string)callInfo[0]; + return Task.CompletedTask; + }); + + sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L); + + ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask( + importOrchestratorTaskInputData, + importOrchestratorTaskContext, + taskManager, + sequenceIdGenerator, + contextUpdater, + contextAccessor, + fhirDataBulkImportOperation, + integrationDataStoreClient, + loggerFactory); + orchestratorTask.PollingFrequencyInSeconds = 0; + + await Assert.ThrowsAnyAsync(() => orchestratorTask.ExecuteAsync()); + ImportOrchestratorTaskContext context = JsonConvert.DeserializeObject(latestContext); + Assert.Equal(ImportOrchestratorTaskProgress.InputResourcesValidated, context.Progress); + } + + [Fact] + public async Task GivenAnOrchestratorTask_WhenFailedAtGenerateSubTasksStep_ThenRetrableExceptionShouldBeThrowAndContextUpdated() + { + IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For(); + IContextUpdater contextUpdater = Substitute.For(); + RequestContextAccessor contextAccessor = Substitute.For>(); + 
ILoggerFactory loggerFactory = new NullLoggerFactory();
            IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For<IIntegrationDataStoreClient>();
            ISequenceIdGenerator<long> sequenceIdGenerator = Substitute.For<ISequenceIdGenerator<long>>();
            ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData();
            ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext();

            // Task manager that leaves every processing task running so the orchestrator
            // stays in the preprocess/monitor phase when the sequence-id generator fails.
            TestTaskManager taskManager = new TestTaskManager(t =>
            {
                if (t == null)
                {
                    return null;
                }

                t.Status = TaskManagement.TaskStatus.Running;
                return t;
            });

            importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N");
            importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow;
            importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy");
            var inputs = new List<InputResource>();
            inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy") });

            importOrchestratorTaskInputData.Input = inputs;
            importOrchestratorTaskInputData.InputFormat = "ndjson";
            importOrchestratorTaskInputData.InputSource = new Uri("http://dummy");
            importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1;
            importOrchestratorTaskInputData.ProcessingTaskQueueId = "default";
            importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy");

            integrationDataStoreClient.GetPropertiesAsync(Arg.Any<Uri>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Dictionary<string, object> properties = new Dictionary<string, object>();
                    properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test";
                    properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L;
                    return properties;
                });

            // Capture the most recent serialized context so the final progress can be asserted.
            string latestContext = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    latestContext = (string)callInfo[0];
                    return Task.CompletedTask;
                });

            // Failure injection: generating the starting sequence id throws during preprocess.
            sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => throw new InvalidOperationException());

            ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask(
                importOrchestratorTaskInputData,
                importOrchestratorTaskContext,
                taskManager,
                sequenceIdGenerator,
                contextUpdater,
                contextAccessor,
                fhirDataBulkImportOperation,
                integrationDataStoreClient,
                loggerFactory);
            orchestratorTask.PollingFrequencyInSeconds = 0;

            await Assert.ThrowsAnyAsync<RetriableTaskException>(() => orchestratorTask.ExecuteAsync());
            ImportOrchestratorTaskContext context = JsonConvert.DeserializeObject<ImportOrchestratorTaskContext>(latestContext);
            Assert.Equal(ImportOrchestratorTaskProgress.PreprocessCompleted, context.Progress);
        }

        // Polling sub-task status throws: the orchestrator should surface a retriable
        // exception and have persisted progress at SubTaskRecordsGenerated.
        [Fact]
        public async Task GivenAnOrchestratorTask_WhenFailedAtMonitorSubTasksStep_ThenRetrableExceptionShouldBeThrowAndContextUpdated()
        {
            IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For<IImportOrchestratorTaskDataStoreOperation>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();
            IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For<IIntegrationDataStoreClient>();
            ISequenceIdGenerator<long> sequenceIdGenerator = Substitute.For<ISequenceIdGenerator<long>>();
            ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData();
            ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext();

            // Failure injection: any interaction with the task manager throws while monitoring.
            TestTaskManager taskManager = new TestTaskManager(t =>
            {
                throw new InvalidOperationException();
            });

            importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N");
            importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow;
            importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy");
            var inputs = new List<InputResource>();
            inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy") });

            importOrchestratorTaskInputData.Input = inputs;
            importOrchestratorTaskInputData.InputFormat = "ndjson";
            importOrchestratorTaskInputData.InputSource = new Uri("http://dummy");
            importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1;
            importOrchestratorTaskInputData.ProcessingTaskQueueId = "default";
            importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy");

            integrationDataStoreClient.GetPropertiesAsync(Arg.Any<Uri>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Dictionary<string, object> properties = new Dictionary<string, object>();
                    properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test";
                    properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L;
                    return properties;
                });

            string latestContext = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    latestContext = (string)callInfo[0];
                    return Task.CompletedTask;
                });

            sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L);

            ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask(
                importOrchestratorTaskInputData,
                importOrchestratorTaskContext,
                taskManager,
                sequenceIdGenerator,
                contextUpdater,
                contextAccessor,
                fhirDataBulkImportOperation,
                integrationDataStoreClient,
                loggerFactory);
            orchestratorTask.PollingFrequencyInSeconds = 0;

            await Assert.ThrowsAnyAsync<RetriableTaskException>(() => orchestratorTask.ExecuteAsync());
            ImportOrchestratorTaskContext context = JsonConvert.DeserializeObject<ImportOrchestratorTaskContext>(latestContext);
            Assert.Equal(ImportOrchestratorTaskProgress.SubTaskRecordsGenerated, context.Progress);
        }

        // A processing sub-task completes with a Fail result: the orchestrator should
        // report failure (not throw) and have persisted progress at SubTaskRecordsGenerated.
        [Fact]
        public async Task GivenAnOrchestratorTask_WhenSubTaskFailed_ThenImportProcessingExceptionShouldBeThrowAndContextUpdated()
        {
            IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For<IImportOrchestratorTaskDataStoreOperation>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();
            IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For<IIntegrationDataStoreClient>();
            ISequenceIdGenerator<long> sequenceIdGenerator = Substitute.For<ISequenceIdGenerator<long>>();
            ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData();
            ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext();

            // Every processing task completes with a Fail result.
            TestTaskManager taskManager = new TestTaskManager(t =>
            {
                if (t == null)
                {
                    return null;
                }

                TaskResultData resultData = new TaskResultData();
                resultData.Result = TaskResult.Fail;
                resultData.ResultData = "error";

                t.Result = JsonConvert.SerializeObject(resultData);
                t.Status = TaskManagement.TaskStatus.Completed;

                return t;
            });

            importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N");
            importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow;
            importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy");
            var inputs = new List<InputResource>();
            inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy") });

            importOrchestratorTaskInputData.Input = inputs;
            importOrchestratorTaskInputData.InputFormat = "ndjson";
            importOrchestratorTaskInputData.InputSource = new Uri("http://dummy");
            importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1;
            importOrchestratorTaskInputData.ProcessingTaskQueueId = "default";
            importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy");

            integrationDataStoreClient.GetPropertiesAsync(Arg.Any<Uri>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Dictionary<string, object> properties = new Dictionary<string, object>();
                    properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test";
                    properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L;
                    return properties;
                });

            string latestContext = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    latestContext = (string)callInfo[0];
                    return Task.CompletedTask;
                });

            sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L);

            ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask(
                importOrchestratorTaskInputData,
                importOrchestratorTaskContext,
                taskManager,
                sequenceIdGenerator,
                contextUpdater,
                contextAccessor,
                fhirDataBulkImportOperation,
                integrationDataStoreClient,
                loggerFactory);
            orchestratorTask.PollingFrequencyInSeconds = 0;

            TaskResultData taskResultData = await orchestratorTask.ExecuteAsync();
            Assert.Equal(TaskResult.Fail, taskResultData.Result);

            ImportOrchestratorTaskContext context = JsonConvert.DeserializeObject<ImportOrchestratorTaskContext>(latestContext);
            Assert.Equal(ImportOrchestratorTaskProgress.SubTaskRecordsGenerated, context.Progress);
        }

        // All sub-tasks succeed but the datastore postprocess step throws: a retriable
        // exception should propagate and progress should be persisted at SubTasksCompleted.
        [Fact]
        public async Task GivenAnOrchestratorTask_WhenFailedAtPostProcessStep_ThenRetrableExceptionShouldBeThrowAndContextUpdated()
        {
            IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For<IImportOrchestratorTaskDataStoreOperation>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();
            IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For<IIntegrationDataStoreClient>();
            ISequenceIdGenerator<long> sequenceIdGenerator = Substitute.For<ISequenceIdGenerator<long>>();
            ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData();
            ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext();
            List<(long begin, long end)> surrogatedIdRanges = new List<(long begin, long end)>();

            // Every processing task completes successfully and records its sequence-id range.
            TestTaskManager taskManager = new TestTaskManager(t =>
            {
                if (t == null)
                {
                    return null;
                }

                ImportProcessingTaskInputData processingInput = JsonConvert.DeserializeObject<ImportProcessingTaskInputData>(t.InputData);
                ImportProcessingTaskResult processingResult = new ImportProcessingTaskResult();
                processingResult.ResourceType = processingInput.ResourceType;
                processingResult.SucceedCount = 1;
                processingResult.FailedCount = 1;
                processingResult.ErrorLogLocation = "http://dummy/error";
                surrogatedIdRanges.Add((processingInput.BeginSequenceId, processingInput.EndSequenceId));

                t.Result = JsonConvert.SerializeObject(new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(processingResult)));
                t.Status = TaskManagement.TaskStatus.Completed;
                return t;
            });

            importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N");
            importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow;
            importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy");
            var inputs = new List<InputResource>();
            inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy") });

            importOrchestratorTaskInputData.Input = inputs;
            importOrchestratorTaskInputData.InputFormat = "ndjson";
            importOrchestratorTaskInputData.InputSource = new Uri("http://dummy");
            importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1;
            importOrchestratorTaskInputData.ProcessingTaskQueueId = "default";
            importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy");

            integrationDataStoreClient.GetPropertiesAsync(Arg.Any<Uri>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Dictionary<string, object> properties = new Dictionary<string, object>();
                    properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test";
                    properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L;
                    return properties;
                });

            string latestContext = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    latestContext = (string)callInfo[0];
                    return Task.CompletedTask;
                });

            // Failure injection: the datastore postprocess step throws.
            fhirDataBulkImportOperation.PostprocessAsync(Arg.Any<CancellationToken>())
                .Returns(_ =>
                {
                    throw new InvalidCastException();
                });

            sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L);

            ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask(
                importOrchestratorTaskInputData,
                importOrchestratorTaskContext,
                taskManager,
                sequenceIdGenerator,
                contextUpdater,
                contextAccessor,
                fhirDataBulkImportOperation,
                integrationDataStoreClient,
                loggerFactory);
            orchestratorTask.PollingFrequencyInSeconds = 0;

            await Assert.ThrowsAnyAsync<RetriableTaskException>(() => orchestratorTask.ExecuteAsync());
            ImportOrchestratorTaskContext context = JsonConvert.DeserializeObject<ImportOrchestratorTaskContext>(latestContext);
            Assert.Equal(ImportOrchestratorTaskProgress.SubTasksCompleted, context.Progress);
            Assert.Equal(1, context.DataProcessingTasks.Count);
        }

        // Cancelling before execution should short-circuit to a Canceled result.
        [Fact]
        public async Task GivenAnOrchestratorTask_WhenCancelBefore_ThenCanceledResultShouldBeReturn()
        {
            IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For<IImportOrchestratorTaskDataStoreOperation>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();
            IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For<IIntegrationDataStoreClient>();
            ISequenceIdGenerator<long> sequenceIdGenerator = Substitute.For<ISequenceIdGenerator<long>>();
            ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData();
            ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext();
            List<(long begin, long end)> surrogatedIdRanges = new List<(long begin, long end)>();
            TestTaskManager taskManager = new TestTaskManager(t =>
            {
                if (t == null)
                {
                    return null;
                }

                ImportProcessingTaskInputData processingInput = JsonConvert.DeserializeObject<ImportProcessingTaskInputData>(t.InputData);
                ImportProcessingTaskResult processingResult = new ImportProcessingTaskResult();
                processingResult.ResourceType = processingInput.ResourceType;
                processingResult.SucceedCount = 1;
                processingResult.FailedCount = 1;
                processingResult.ErrorLogLocation = "http://dummy/error";
                surrogatedIdRanges.Add((processingInput.BeginSequenceId, processingInput.EndSequenceId));

                t.Result = JsonConvert.SerializeObject(new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(processingResult)));
                t.Status = TaskManagement.TaskStatus.Completed;
                return t;
            });

            importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N");
            importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow;
            importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy");
            var inputs = new List<InputResource>();
            inputs.Add(new InputResource() { Type = "Resource", Url = new Uri("http://dummy") });

            importOrchestratorTaskInputData.Input = inputs;
            importOrchestratorTaskInputData.InputFormat = "ndjson";
            importOrchestratorTaskInputData.InputSource = new Uri("http://dummy");
            importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = 1;
            importOrchestratorTaskInputData.ProcessingTaskQueueId = "default";
            importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy");

            integrationDataStoreClient.GetPropertiesAsync(Arg.Any<Uri>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Dictionary<string, object> properties = new Dictionary<string, object>();
                    properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test";
                    properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L;
                    return properties;
                });

            string latestContext = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    latestContext = (string)callInfo[0];
                    return Task.CompletedTask;
                });

            sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L);

            ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask(
                importOrchestratorTaskInputData,
                importOrchestratorTaskContext,
                taskManager,
                sequenceIdGenerator,
                contextUpdater,
                contextAccessor,
                fhirDataBulkImportOperation,
                integrationDataStoreClient,
                loggerFactory);
            orchestratorTask.PollingFrequencyInSeconds = 0;

            orchestratorTask.Cancel();
            TaskResultData taskResult = await orchestratorTask.ExecuteAsync();
            Assert.Equal(TaskResult.Canceled, taskResult.Result);
        }

        /// <summary>
        /// Shared happy-path driver: runs the orchestrator over <paramref name="inputFileCount"/> input
        /// files with <paramref name="concurrentCount"/> parallel processing tasks, optionally resuming
        /// from a partially completed context (<paramref name="resumeFrom"/> is the index of the last
        /// already-completed input; -1 means a clean start), then verifies the aggregated import result
        /// and that the generated surrogate-id ranges do not overlap.
        /// </summary>
        private static async Task VerifyCommonOrchestratorTaskAsync(int inputFileCount, int concurrentCount, int resumeFrom = -1)
        {
            IImportOrchestratorTaskDataStoreOperation fhirDataBulkImportOperation = Substitute.For<IImportOrchestratorTaskDataStoreOperation>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();
            IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For<IIntegrationDataStoreClient>();
            ISequenceIdGenerator<long> sequenceIdGenerator = Substitute.For<ISequenceIdGenerator<long>>();
            ImportOrchestratorTaskInputData importOrchestratorTaskInputData = new ImportOrchestratorTaskInputData();
            ImportOrchestratorTaskContext importOrchestratorTaskContext = new ImportOrchestratorTaskContext();
            List<(long begin, long end)> surrogatedIdRanges = new List<(long begin, long end)>();
            TestTaskManager taskManager = new TestTaskManager(t =>
            {
                if (t == null)
                {
                    return null;
                }

                // Already-completed tasks (resume scenario) are returned untouched.
                if (t.Status == TaskManagement.TaskStatus.Completed)
                {
                    return t;
                }

                ImportProcessingTaskInputData processingInput = JsonConvert.DeserializeObject<ImportProcessingTaskInputData>(t.InputData);
                ImportProcessingTaskResult processingResult = new ImportProcessingTaskResult();
                processingResult.ResourceType = processingInput.ResourceType;
                processingResult.SucceedCount = 1;
                processingResult.FailedCount = 1;
                processingResult.ErrorLogLocation = "http://dummy/error";
                surrogatedIdRanges.Add((processingInput.BeginSequenceId, processingInput.EndSequenceId));

                t.Result = JsonConvert.SerializeObject(new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(processingResult)));
                t.Status = TaskManagement.TaskStatus.Completed;
                return t;
            });

            importOrchestratorTaskInputData.TaskId = Guid.NewGuid().ToString("N");
            importOrchestratorTaskInputData.TaskCreateTime = Clock.UtcNow;
            importOrchestratorTaskInputData.BaseUri = new Uri("http://dummy");
            var inputs = new List<InputResource>();

            bool resumeMode = resumeFrom >= 0;
            for (int i = 0; i < inputFileCount; ++i)
            {
                string location = $"http://dummy/{i}";
                inputs.Add(new InputResource() { Type = "Resource", Url = new Uri(location) });

                if (resumeMode)
                {
                    if (i <= resumeFrom)
                    {
                        // Pre-populate a completed processing task for inputs processed before the resume.
                        TaskInfo taskInfo = new TaskInfo();
                        taskInfo.TaskId = Guid.NewGuid().ToString("N");

                        ImportProcessingTaskResult processingResult = new ImportProcessingTaskResult();
                        processingResult.ResourceType = "Resource";
                        processingResult.SucceedCount = 1;
                        processingResult.FailedCount = 1;
                        processingResult.ErrorLogLocation = "http://dummy/error";

                        taskInfo.Result = JsonConvert.SerializeObject(new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(processingResult)));
                        taskInfo.Status = TaskManagement.TaskStatus.Completed;

                        await taskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None);

                        importOrchestratorTaskContext.DataProcessingTasks[new Uri(location)] = taskInfo;
                    }
                    else
                    {
                        // Remaining inputs were created but not yet completed before the resume.
                        TaskInfo taskInfo = new TaskInfo();
                        taskInfo.TaskId = Guid.NewGuid().ToString("N");
                        ImportProcessingTaskInputData processingInput = new ImportProcessingTaskInputData();
                        processingInput.BaseUriString = "http://dummy";
                        processingInput.BeginSequenceId = i;
                        processingInput.EndSequenceId = i + 1;
                        processingInput.ResourceType = "Resource";
                        taskInfo.InputData = JsonConvert.SerializeObject(processingInput);

                        await taskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None);
                        importOrchestratorTaskContext.DataProcessingTasks[new Uri(location)] = taskInfo;
                    }

                    importOrchestratorTaskContext.Progress = ImportOrchestratorTaskProgress.SubTaskRecordsGenerated;
                }
            }

            importOrchestratorTaskInputData.Input = inputs;
            importOrchestratorTaskInputData.InputFormat = "ndjson";
            importOrchestratorTaskInputData.InputSource = new Uri("http://dummy");
            importOrchestratorTaskInputData.MaxConcurrentProcessingTaskCount = concurrentCount;
            importOrchestratorTaskInputData.ProcessingTaskQueueId = "default";
            importOrchestratorTaskInputData.RequestUri = new Uri("http://dummy");

            integrationDataStoreClient.GetPropertiesAsync(Arg.Any<Uri>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Dictionary<string, object> properties = new Dictionary<string, object>();
                    properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test";
                    properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L;
                    return properties;
                });

            sequenceIdGenerator.GetCurrentSequenceId().Returns(_ => 0L);

            ImportOrchestratorTask orchestratorTask = new ImportOrchestratorTask(
                importOrchestratorTaskInputData,
                importOrchestratorTaskContext,
                taskManager,
                sequenceIdGenerator,
                contextUpdater,
                contextAccessor,
                fhirDataBulkImportOperation,
                integrationDataStoreClient,
                loggerFactory);
            orchestratorTask.PollingFrequencyInSeconds = 0;

            TaskResultData result = await orchestratorTask.ExecuteAsync();
            ImportTaskResult resultDetails = JsonConvert.DeserializeObject<ImportTaskResult>(result.ResultData);
            Assert.Equal(TaskResult.Success, result.Result);
            Assert.Equal(inputFileCount, resultDetails.Output.Count);
            foreach (ImportOperationOutcome outcome in resultDetails.Output)
            {
                Assert.Equal(1, outcome.Count);
                Assert.NotNull(outcome.InputUrl);
                Assert.NotEmpty(outcome.Type);
            }

            Assert.Equal(inputFileCount, resultDetails.Error.Count);
            foreach (ImportFailedOperationOutcome outcome in resultDetails.Error)
            {
                Assert.Equal(1, outcome.Count);
                Assert.NotNull(outcome.InputUrl);
                Assert.NotEmpty(outcome.Type);
                Assert.NotEmpty(outcome.Url);
            }

            Assert.NotEmpty(resultDetails.Request);
            Assert.Equal(importOrchestratorTaskInputData.TaskCreateTime, resultDetails.TransactionTime);

            // Ranges generated in this run plus pre-completed resume inputs must cover every file,
            // and the ranges must be strictly increasing and non-overlapping.
            var orderedSurrogatedIdRanges = surrogatedIdRanges.OrderBy(r => r.begin).ToArray();
            Assert.Equal(inputFileCount, orderedSurrogatedIdRanges.Length + resumeFrom + 1);
            for (int i = 0; i < orderedSurrogatedIdRanges.Length - 1; ++i)
            {
                Assert.True(orderedSurrogatedIdRanges[i].end > orderedSurrogatedIdRanges[i].begin);
                Assert.True(orderedSurrogatedIdRanges[i].end <= orderedSurrogatedIdRanges[i + 1].begin);
            }
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Health.Core.Features.Context;
using Microsoft.Health.Fhir.Core.Features.Context;
using Microsoft.Health.Fhir.Core.Features.Operations.Import;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.TaskManagement;
using Newtonsoft.Json;
using NSubstitute;
using Xunit;

namespace Microsoft.Health.Fhir.Core.UnitTests.Features.Operations.Import
{
    public class ImportProcessingTaskTests
    {
        [Fact]
        public async Task GivenImportTaskInput_WhenStartFromClean_ThenAllResoruceShouldBeImported()
        {
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();
            await VerifyCommonImportTaskAsync(inputData, progress);
        }

        [Fact]
        public async Task GivenImportTaskInput_WhenStartFromMiddle_ThenAllResoruceShouldBeImported()
        {
            // Resume from a partially completed import: counts and index are non-zero.
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();
            progress.SucceedImportCount = 3;
            progress.FailedImportCount = 1;
            progress.CurrentIndex = 4;

            await VerifyCommonImportTaskAsync(inputData, progress);
        }

        // The importer fails mid-stream: the last reported progress must have been
        // persisted through the context updater before the failure surfaces.
        [Fact]
        public async Task GivenImportTaskInput_WhenExceptionThrowForImport_ThenContextShouldBeUpdatedBeforeFailure()
        {
            long currentIndex = 100;
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            // Loader produces an already-completed, empty channel.
            loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();
                    resourceChannel.Writer.Complete();

                    return (resourceChannel, Task.CompletedTask);
                });

            // Importer reports progress once, then fails.
            importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Channel<ImportProcessingProgress> progressChannel = Channel.CreateUnbounded<ImportProcessingProgress>();

                    Task loadTask = Task.Run(async () =>
                    {
                        try
                        {
                            // Renamed from 'progress' to avoid shadowing the enclosing local (CS0136).
                            ImportProcessingProgress reportedProgress = new ImportProcessingProgress();
                            reportedProgress.CurrentIndex = currentIndex;

                            await progressChannel.Writer.WriteAsync(reportedProgress);
                            throw new InvalidOperationException();
                        }
                        finally
                        {
                            progressChannel.Writer.Complete();
                        }
                    });

                    return (progressChannel, loadTask);
                });

            string context = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    context = (string)callInfo[0];

                    return Task.CompletedTask;
                });

            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());

            ImportProcessingProgress progressForContext = JsonConvert.DeserializeObject<ImportProcessingProgress>(context);
            Assert.Equal(progressForContext.CurrentIndex, currentIndex);
        }

        // The loader fails: the task should wrap the failure in a retriable exception.
        [Fact]
        public async Task GivenImportTaskInput_WhenExceptionThrowForLoad_ThenRetriableExceptionShouldBeThrow()
        {
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();

                    Task loadTask = Task.Run(() =>
                    {
                        try
                        {
                            throw new InvalidOperationException();
                        }
                        finally
                        {
                            resourceChannel.Writer.Complete();
                        }
                    });

                    return (resourceChannel, loadTask);
                });

            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());
        }

        // Cleaning previously imported data fails: the task should throw a retriable exception.
        [Fact]
        public async Task GivenImportTaskInput_WhenExceptionThrowForCleanData_ThenRetriableExceptionShouldBeThrow()
        {
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            importer.CleanResourceAsync(Arg.Any<ImportProcessingTaskInputData>(), Arg.Any<ImportProcessingProgress>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    throw new InvalidOperationException();
                });

            progress.NeedCleanData = true;
            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            await Assert.ThrowsAsync<RetriableTaskException>(() => task.ExecuteAsync());
        }

        // Cancellation during import should surface as a Canceled task result, not an exception.
        [Fact]
        public async Task GivenImportTaskInput_WhenOperationWasCancelledExceptionThrow_ThenTaskShouldBeCanceled()
        {
            ImportProcessingTaskInputData inputData = GetInputData();
            ImportProcessingProgress progress = new ImportProcessingProgress();

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    throw new OperationCanceledException();
                });

            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            TaskResultData result = await task.ExecuteAsync();
            Assert.Equal(TaskResult.Canceled, result.Result);
        }

        /// <summary>
        /// Shared happy-path driver: runs the processing task over a stubbed loader that yields one
        /// good and one failing resource, then asserts the aggregated counts, the persisted context,
        /// and that previously imported data was cleaned from the resume index to the range end.
        /// </summary>
        private static async Task VerifyCommonImportTaskAsync(ImportProcessingTaskInputData inputData, ImportProcessingProgress progress)
        {
            long startIndexFromProgress = progress.CurrentIndex;
            long succeedCountFromProgress = progress.SucceedImportCount;
            long failedCountFromProgress = progress.FailedImportCount;

            IImportResourceLoader loader = Substitute.For<IImportResourceLoader>();
            IResourceBulkImporter importer = Substitute.For<IResourceBulkImporter>();
            IImportErrorStore importErrorStore = Substitute.For<IImportErrorStore>();
            IImportErrorStoreFactory importErrorStoreFactory = Substitute.For<IImportErrorStoreFactory>();
            IContextUpdater contextUpdater = Substitute.For<IContextUpdater>();
            RequestContextAccessor<IFhirRequestContext> contextAccessor = Substitute.For<RequestContextAccessor<IFhirRequestContext>>();
            ILoggerFactory loggerFactory = new NullLoggerFactory();

            long cleanStart = -1;
            long cleanEnd = -1;
            importer.CleanResourceAsync(Arg.Any<ImportProcessingTaskInputData>(), Arg.Any<ImportProcessingProgress>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    // Renamed from 'inputData'/'progress' to avoid shadowing the method parameters (CS0136).
                    ImportProcessingTaskInputData cleanInput = (ImportProcessingTaskInputData)callInfo[0];
                    ImportProcessingProgress cleanProgress = (ImportProcessingProgress)callInfo[1];
                    long beginSequenceId = cleanInput.BeginSequenceId;
                    long endSequenceId = cleanInput.EndSequenceId;
                    long endIndex = cleanProgress.CurrentIndex;

                    cleanStart = beginSequenceId + endIndex;
                    cleanEnd = endSequenceId;

                    return Task.CompletedTask;
                });

            // Loader emits one importable resource and one resource with a parse error.
            loader.LoadResources(Arg.Any<string>(), Arg.Any<long>(), Arg.Any<string>(), Arg.Any<Func<long, long>>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    long startIndex = (long)callInfo[1];
                    Func<long, long> idGenerator = (Func<long, long>)callInfo[3];
                    Channel<ImportResource> resourceChannel = Channel.CreateUnbounded<ImportResource>();

                    Task loadTask = Task.Run(async () =>
                    {
                        ResourceWrapper resourceWrapper = new ResourceWrapper(
                            Guid.NewGuid().ToString(),
                            "0",
                            "Dummy",
                            new RawResource(Guid.NewGuid().ToString(), Fhir.Core.Models.FhirResourceFormat.Json, true),
                            new ResourceRequest("POST"),
                            DateTimeOffset.UtcNow,
                            false,
                            null,
                            null,
                            null,
                            "SearchParam");

                        await resourceChannel.Writer.WriteAsync(new ImportResource(idGenerator(startIndex), startIndex, resourceWrapper));
                        await resourceChannel.Writer.WriteAsync(new ImportResource(idGenerator(startIndex + 1), startIndex + 1, "Error"));
                        resourceChannel.Writer.Complete();
                    });

                    return (resourceChannel, loadTask);
                });

            // Importer drains the resource channel, counting successes and failures.
            importer.Import(Arg.Any<Channel<ImportResource>>(), Arg.Any<IImportErrorStore>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    Channel<ImportResource> resourceChannel = (Channel<ImportResource>)callInfo[0];
                    Channel<ImportProcessingProgress> progressChannel = Channel.CreateUnbounded<ImportProcessingProgress>();

                    Task loadTask = Task.Run(async () =>
                    {
                        // Renamed from 'progress' to avoid shadowing the method parameter (CS0136).
                        ImportProcessingProgress stubProgress = new ImportProcessingProgress();
                        await foreach (ImportResource resource in resourceChannel.Reader.ReadAllAsync())
                        {
                            if (string.IsNullOrEmpty(resource.ImportError))
                            {
                                stubProgress.SucceedImportCount++;
                            }
                            else
                            {
                                stubProgress.FailedImportCount++;
                            }

                            stubProgress.CurrentIndex = resource.Index + 1;
                        }

                        await progressChannel.Writer.WriteAsync(stubProgress);
                        progressChannel.Writer.Complete();
                    });

                    return (progressChannel, loadTask);
                });

            string context = null;
            contextUpdater.UpdateContextAsync(Arg.Any<string>(), Arg.Any<CancellationToken>())
                .Returns(callInfo =>
                {
                    context = (string)callInfo[0];

                    return Task.CompletedTask;
                });

            progress.NeedCleanData = true;
            ImportProcessingTask task = new ImportProcessingTask(
                inputData,
                progress,
                loader,
                importer,
                importErrorStoreFactory,
                contextUpdater,
                contextAccessor,
                loggerFactory);

            TaskResultData taskResult = await task.ExecuteAsync();
            Assert.Equal(TaskResult.Success, taskResult.Result);
            ImportProcessingTaskResult result = JsonConvert.DeserializeObject<ImportProcessingTaskResult>(taskResult.ResultData);
            Assert.Equal(1 + failedCountFromProgress, result.FailedCount);
            Assert.Equal(1 + succeedCountFromProgress, result.SucceedCount);

            ImportProcessingProgress progressForContext = JsonConvert.DeserializeObject<ImportProcessingProgress>(context);
            Assert.Equal(progressForContext.SucceedImportCount, result.SucceedCount);
            Assert.Equal(progressForContext.FailedImportCount, result.FailedCount);
            Assert.Equal(startIndexFromProgress + 2, progressForContext.CurrentIndex);

            Assert.Equal(startIndexFromProgress, cleanStart);
            Assert.Equal(inputData.EndSequenceId, cleanEnd);
        }

        // Builds a minimal, valid task input pointing at dummy endpoints.
        private ImportProcessingTaskInputData GetInputData()
        {
            ImportProcessingTaskInputData inputData = new ImportProcessingTaskInputData();
            inputData.BaseUriString = "http://dummy";
            inputData.ResourceLocation = "http://dummy";
            inputData.ResourceType = "Resource";
            inputData.TaskId = Guid.NewGuid().ToString("N");
            inputData.UriString = "http://dummy";

            return inputData;
        }
    }
}

// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Health.Fhir.Core.Features.Operations;
using Microsoft.Health.Fhir.Core.Features.Operations.Import;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using NSubstitute;
using Xunit;

namespace Microsoft.Health.Fhir.Core.UnitTests.Features.Operations.Import
{
    public class ImportResourceLoaderTests
    {
        [Fact]
        public async Task GivenResourceLoader_WhenLoadResources_ThenAllResoruceShouldBeLoad()
        {
            await VerifyResourceLoaderAsync(1234, 21, 0);
        }

        [Fact]
        public async Task GivenResourceLoader_WhenLoadResourcesFromMiddle_ThenAllResoruceShouldBeLoad()
        {
            await VerifyResourceLoaderAsync(1234, 21, 20);
        }

        [Fact]
        public async Task GivenResourceLoader_WhenLoadResourcesCountEqualsBatchSize_ThenAllResoruceShouldBeLoad()
        {
            await VerifyResourceLoaderAsync(21, 21, 0);
        }

        [Fact]
        public async Task GivenResourceLoader_WhenLoadResourcesCountLessThanBatchSize_ThenAllResoruceShouldBeLoad()
        {
            await
VerifyResourceLoaderAsync(1, 21, 0); + } + + [Fact] + public async Task GivenResourceLoader_WhenLoadResourcesWithParseException_ThenAllResoruceShouldBeLoadAndErrorShouldBeReturned() + { + string errorMessage = "error"; + using MemoryStream stream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(stream); + await writer.WriteLineAsync("test"); + await writer.FlushAsync(); + + stream.Position = 0; + + IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); + integrationDataStoreClient.DownloadResource(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(stream); + integrationDataStoreClient.TryAcquireLeaseAsync(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(string.Empty); + + IImportResourceParser importResourceParser = Substitute.For(); + importResourceParser.Parse(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + throw new InvalidOperationException(errorMessage); + }); + + IImportErrorSerializer serializer = Substitute.For(); + serializer.Serialize(Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + Exception ex = (Exception)callInfo[1]; + return ex.Message; + }); + + Func idGenerator = (i) => i; + ImportResourceLoader loader = new ImportResourceLoader(integrationDataStoreClient, importResourceParser, serializer, NullLogger.Instance); + + (Channel outputChannel, Task importTask) = loader.LoadResources("http://dummy", 0, null, idGenerator, CancellationToken.None); + + int errorCount = 0; + await foreach (ImportResource resource in outputChannel.Reader.ReadAllAsync()) + { + Assert.Equal(errorMessage, resource.ImportError); + ++errorCount; + } + + await importTask; + + Assert.Equal(1, errorCount); + } + + [Fact] + public async Task GivenResourceLoader_WhenLoadResourcesWithDifferentResourceType_ThenResourcesWithDifferentTypeShouldBeSkipped() + { + string errorMessage = "Resource type not match."; + using MemoryStream stream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(stream); + await 
writer.WriteLineAsync("test"); + await writer.FlushAsync(); + + stream.Position = 0; + + IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); + integrationDataStoreClient.DownloadResource(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(stream); + integrationDataStoreClient.TryAcquireLeaseAsync(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(string.Empty); + + IImportResourceParser importResourceParser = Substitute.For(); + importResourceParser.Parse(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + ImportResource importResource = new ImportResource(null); + return importResource; + }); + + IImportErrorSerializer serializer = Substitute.For(); + serializer.Serialize(Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + Exception ex = (Exception)callInfo[1]; + return ex.Message; + }); + + Func idGenerator = (i) => i; + ImportResourceLoader loader = new ImportResourceLoader(integrationDataStoreClient, importResourceParser, serializer, NullLogger.Instance); + + (Channel outputChannel, Task importTask) = loader.LoadResources("http://dummy", 0, "DummyType", idGenerator, CancellationToken.None); + + int errorCount = 0; + await foreach (ImportResource resource in outputChannel.Reader.ReadAllAsync()) + { + Assert.Equal(errorMessage, resource.ImportError); + ++errorCount; + } + + await importTask; + + Assert.Equal(1, errorCount); + } + + [Fact] + public async Task GivenResourceLoader_WhenCancelLoadTask_ThenDataLoadTaskShouldBeCanceled() + { + string errorMessage = "error"; + using MemoryStream stream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(stream); + await writer.WriteLineAsync("test"); + await writer.WriteLineAsync("test"); + await writer.WriteLineAsync("test"); + await writer.WriteLineAsync("test"); + await writer.FlushAsync(); + + stream.Position = 0; + + AutoResetEvent resetEvent1 = new AutoResetEvent(false); + ManualResetEvent resetEvent2 = new ManualResetEvent(false); + + IIntegrationDataStoreClient 
integrationDataStoreClient = Substitute.For(); + integrationDataStoreClient.DownloadResource(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(stream); + integrationDataStoreClient.TryAcquireLeaseAsync(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(string.Empty); + + IImportResourceParser importResourceParser = Substitute.For(); + importResourceParser.Parse(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + resetEvent1.Set(); + resetEvent2.WaitOne(); + + throw new InvalidCastException(errorMessage); + }); + + IImportErrorSerializer serializer = Substitute.For(); + serializer.Serialize(Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + Exception ex = (Exception)callInfo[1]; + return ex.Message; + }); + + Func idGenerator = (i) => i; + ImportResourceLoader loader = new ImportResourceLoader(integrationDataStoreClient, importResourceParser, serializer, NullLogger.Instance); + + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + (Channel outputChannel, Task importTask) = loader.LoadResources("http://dummy", 0, null, idGenerator, cancellationTokenSource.Token); + + resetEvent1.WaitOne(); + cancellationTokenSource.Cancel(); + resetEvent2.Set(); + + await foreach (ImportResource resource in outputChannel.Reader.ReadAllAsync()) + { + // do nothing. 
+ } + + try + { + await importTask; + throw new InvalidOperationException(); + } + catch (TaskCanceledException) + { + // Expected error + } + catch (OperationCanceledException) + { + // Expected error + } + } + + private async Task VerifyResourceLoaderAsync(int resourcCount, int batchSize, long startIndex) + { + long startId = 1; + List inputStrings = new List(); + using MemoryStream stream = new MemoryStream(); + using StreamWriter writer = new StreamWriter(stream); + for (int i = 0; i < resourcCount; ++i) + { + string content = (i + startId).ToString(); + inputStrings.Add(content); + await writer.WriteLineAsync(content); + } + + await writer.FlushAsync(); + stream.Position = 0; + + IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); + integrationDataStoreClient.DownloadResource(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(stream); + integrationDataStoreClient.TryAcquireLeaseAsync(Arg.Any(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(string.Empty); + + IImportResourceParser importResourceParser = Substitute.For(); + importResourceParser.Parse(Arg.Any(), Arg.Any(), Arg.Any()) + .Returns(callInfo => + { + long surrogatedId = (long)callInfo[0]; + long index = (long)callInfo[1]; + string content = (string)callInfo[2]; + ResourceWrapper resourceWrapper = new ResourceWrapper( + content, + "0", + "Dummy", + new RawResource(content, Fhir.Core.Models.FhirResourceFormat.Json, true), + new ResourceRequest("POST"), + DateTimeOffset.UtcNow, + false, + null, + null, + null, + "SearchParam"); + return new ImportResource(surrogatedId, index, resourceWrapper); + }); + + IImportErrorSerializer serializer = Substitute.For(); + + Func idGenerator = (i) => startId + i; + ImportResourceLoader loader = new ImportResourceLoader(integrationDataStoreClient, importResourceParser, serializer, NullLogger.Instance); + loader.MaxBatchSize = batchSize; + + (Channel outputChannel, Task importTask) = loader.LoadResources("http://dummy", startIndex, null, idGenerator, 
CancellationToken.None); + + long currentIndex = startIndex; + await foreach (ImportResource resource in outputChannel.Reader.ReadAllAsync()) + { + string content = idGenerator(currentIndex++).ToString(); + Assert.Equal(content, resource.Resource.ResourceId); + } + + await importTask; + + Assert.Equal(resourcCount, currentIndex); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/TestTaskManager.cs b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/TestTaskManager.cs new file mode 100644 index 0000000000..72060f4310 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/TestTaskManager.cs @@ -0,0 +1,48 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Health.TaskManagement; + +namespace Microsoft.Health.Fhir.Core.UnitTests.Features.Operations.Import +{ + public class TestTaskManager : ITaskManager + { + public TestTaskManager(Func getTaskInfoFunc) + { + GetTaskInfoFunc = getTaskInfoFunc; + } + + public List TaskInfos { get; set; } = new List(); + + public Func GetTaskInfoFunc { get; set; } + + public Task CancelTaskAsync(string taskId, CancellationToken cancellationToken) + { + TaskInfo taskInfo = TaskInfos.FirstOrDefault(t => taskId.Equals(t.TaskId)); + taskInfo.IsCanceled = true; + + return Task.FromResult(taskInfo); + } + + public Task CreateTaskAsync(TaskInfo task, bool isUniqueTaskByType, CancellationToken cancellationToken) + { + TaskInfos.Add(task); + + return Task.FromResult(task); + } + + public Task 
GetTaskAsync(string taskId, CancellationToken cancellationToken) + { + TaskInfo taskInfo = TaskInfos.FirstOrDefault(t => taskId.Equals(t.TaskId)); + + return Task.FromResult(GetTaskInfoFunc?.Invoke(taskInfo)); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Microsoft.Health.Fhir.Core.UnitTests.csproj b/src/Microsoft.Health.Fhir.Core.UnitTests/Microsoft.Health.Fhir.Core.UnitTests.csproj index c2bcd1b323..19aa4c1ff7 100644 --- a/src/Microsoft.Health.Fhir.Core.UnitTests/Microsoft.Health.Fhir.Core.UnitTests.csproj +++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Microsoft.Health.Fhir.Core.UnitTests.csproj @@ -20,6 +20,7 @@ + diff --git a/src/Microsoft.Health.Fhir.Core/Configs/ImportTaskConfiguration.cs b/src/Microsoft.Health.Fhir.Core/Configs/ImportTaskConfiguration.cs new file mode 100644 index 0000000000..09be930163 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Configs/ImportTaskConfiguration.cs @@ -0,0 +1,102 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Configs +{ + public class ImportTaskConfiguration + { + private const int DefaultMaxRunningProcessingTaskCount = 5; + private const int DefaultMaxRetryCount = 5; + private const int DefaultSqlImportBatchSizeForCheckpoint = 80000; + private const int DefaultSqlBatchSizeForImportResourceOperation = 2000; + private const int DefaultSqlBatchSizeForImportParamsOperation = 10000; + private const int DefaultSqlMaxImportOperationConcurrentCount = 5; + private const int DefaultSqlCleanResourceBatchSize = 1000; + private const int DefaultSqlMaxRebuildIndexOperationConcurrentCount = 3; + private const int DefaultSqlMaxDeleteDuplicateOperationConcurrentCount = 3; + private const int DefaultSqlLongRunningOperationTimeoutInSec = 60 * 60 * 2; + private const int DefaultSqlBulkOperationTimeoutInSec = 60 * 10; + + /// + /// Determines whether bulk import is enabled or not. + /// + public bool Enabled { get; set; } + + /// + /// Initial import mode + /// + public bool InitialImportMode { get; set; } + + /// + /// Queue id for data processing task. It might be different from the orchestrator task for a standalone runtime environment. + /// + public string ProcessingTaskQueueId { get; set; } + + /// + /// Controls how many data processing tasks would run at the same time. + /// + public int MaxRunningProcessingTaskCount { get; set; } = DefaultMaxRunningProcessingTaskCount; + + /// + /// Controls the maximum retry count for a failed data processing task. 
+ /// + public short MaxRetryCount { get; set; } = DefaultMaxRetryCount; + + /// + /// Long running operation timeout + /// + public int SqlLongRunningOperationTimeoutInSec { get; set; } = DefaultSqlLongRunningOperationTimeoutInSec; + + /// + /// SQL bulk operation timeout in seconds + /// + public int SqlBulkOperationTimeoutInSec { get; set; } = DefaultSqlBulkOperationTimeoutInSec; + + /// + /// Max batch size for import resource operation + /// + public int SqlBatchSizeForImportResourceOperation { get; set; } = DefaultSqlBatchSizeForImportResourceOperation; + + /// + /// Max batch size for import resource search params operation + /// + public int SqlBatchSizeForImportParamsOperation { get; set; } = DefaultSqlBatchSizeForImportParamsOperation; + + /// + /// Max concurrent count for import operation + /// + public int SqlMaxImportOperationConcurrentCount { get; set; } = DefaultSqlMaxImportOperationConcurrentCount; + + /// + /// Checkpoint batch size + /// + public int SqlImportBatchSizeForCheckpoint { get; set; } = DefaultSqlImportBatchSizeForCheckpoint; + + /// + /// Batch size to clean duplicated resource with same resource id. + /// + public int SqlCleanResourceBatchSize { get; set; } = DefaultSqlCleanResourceBatchSize; + + /// + /// Concurrent count for rebuild index operation. + /// + public int SqlMaxRebuildIndexOperationConcurrentCount { get; set; } = DefaultSqlMaxRebuildIndexOperationConcurrentCount; + + /// + /// Concurrent count for delete duplicate resource operation. + /// + public int SqlMaxDeleteDuplicateOperationConcurrentCount { get; set; } = DefaultSqlMaxDeleteDuplicateOperationConcurrentCount; + + /// + /// Disable optional index during import data. + /// + public bool DisableOptionalIndexesForImport { get; set; } + + /// + /// Disable unique optional index during import data. 
+ /// + public bool DisableUniqueOptionalIndexesForImport { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Configs/IntegrationDataStoreConfiguration.cs b/src/Microsoft.Health.Fhir.Core/Configs/IntegrationDataStoreConfiguration.cs new file mode 100644 index 0000000000..ce04b9d09b --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Configs/IntegrationDataStoreConfiguration.cs @@ -0,0 +1,34 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Configs +{ + public class IntegrationDataStoreConfiguration + { + private const int DefaultMaxRetryCount = 3; + + private const int DefaultRetryInternalInSeconds = 5; + + private const int DefaultMaxWaitTimeInSeconds = -1; + + public string StorageAccountConnection { get; set; } = string.Empty; + + /// + /// Determines the storage account connection that will be used to integration data store to. + /// Should be a uri pointing to the required storage account. 
+ /// + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Usage", + "CA1056:Uri properties should not be strings", + Justification = "Set from an environment variable.")] + public string StorageAccountUri { get; set; } = string.Empty; + + public int MaxRetryCount { get; set; } = DefaultMaxRetryCount; + + public int RetryInternalInSecondes { get; set; } = DefaultRetryInternalInSeconds; + + public int MaxWaitTimeInSeconds { get; set; } = DefaultMaxWaitTimeInSeconds; + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Configs/OperationsConfiguration.cs b/src/Microsoft.Health.Fhir.Core/Configs/OperationsConfiguration.cs index e8174c7f17..8bb8454d6a 100644 --- a/src/Microsoft.Health.Fhir.Core/Configs/OperationsConfiguration.cs +++ b/src/Microsoft.Health.Fhir.Core/Configs/OperationsConfiguration.cs @@ -14,5 +14,9 @@ public class OperationsConfiguration public ConvertDataConfiguration ConvertData { get; set; } = new ConvertDataConfiguration(); public ValidateOperationConfiguration Validate { get; set; } = new ValidateOperationConfiguration(); + + public IntegrationDataStoreConfiguration IntegrationDataStore { get; set; } = new IntegrationDataStoreConfiguration(); + + public ImportTaskConfiguration Import { get; set; } = new ImportTaskConfiguration(); } } diff --git a/src/Microsoft.Health.Fhir.Core/Configs/TaskHostingConfiguration.cs b/src/Microsoft.Health.Fhir.Core/Configs/TaskHostingConfiguration.cs index d498e86c8c..9f91095b10 100644 --- a/src/Microsoft.Health.Fhir.Core/Configs/TaskHostingConfiguration.cs +++ b/src/Microsoft.Health.Fhir.Core/Configs/TaskHostingConfiguration.cs @@ -7,10 +7,36 @@ namespace Microsoft.Health.Fhir.Core.Configs { public class TaskHostingConfiguration { + public const string DefaultQueueId = "default"; + + /// + /// Enable the task hosting service. + /// public bool Enabled { get; set; } - public string QueueId { get; set; } + /// + /// The queue id for task hosting to pull task. 
+ /// + public string QueueId { get; set; } = DefaultQueueId; + + /// + /// Heartbeat timeout for task. + /// + public int? TaskHeartbeatTimeoutThresholdInSeconds { get; set; } + + /// + /// Polling frequency for task hosting to pull task. + /// + public int? PollingFrequencyInSeconds { get; set; } + + /// + /// Max running task count at the same time. + /// + public short? MaxRunningTaskCount { get; set; } - public int TaskHeartbeatTimeoutThresholdInSeconds { get; set; } + /// + /// Heartbeat request interval. + /// + public int? TaskHeartbeatIntervalInSeconds { get; set; } } } diff --git a/src/Microsoft.Health.Fhir.Core/Extensions/ImportMediatorExtensions.cs b/src/Microsoft.Health.Fhir.Core/Extensions/ImportMediatorExtensions.cs new file mode 100644 index 0000000000..6e870260ba --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Extensions/ImportMediatorExtensions.cs @@ -0,0 +1,58 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using MediatR; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; +using Microsoft.Health.Fhir.Core.Messages.Import; + +namespace Microsoft.Health.Fhir.Core.Extensions +{ + public static class ImportMediatorExtensions + { + public static async Task ImportAsync( + this IMediator mediator, + Uri requestUri, + string inputFormat, + Uri inputSource, + IReadOnlyList input, + ImportRequestStorageDetail storageDetail, + CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(mediator, nameof(mediator)); + EnsureArg.IsNotNull(requestUri, nameof(requestUri)); + + var request = new CreateImportRequest(requestUri, inputFormat, inputSource, input, storageDetail); + + CreateImportResponse response = await mediator.Send(request, cancellationToken); + return response; + } + + public static async Task GetImportStatusAsync(this IMediator mediator, string jobId, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(mediator, nameof(mediator)); + EnsureArg.IsNotNullOrWhiteSpace(jobId, nameof(jobId)); + + var request = new GetImportRequest(jobId); + + GetImportResponse response = await mediator.Send(request, cancellationToken); + return response; + } + + public static async Task CancelImportAsync(this IMediator mediator, string jobId, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(mediator, nameof(mediator)); + EnsureArg.IsNotNullOrWhiteSpace(jobId, nameof(jobId)); + + var request = new CancelImportRequest(jobId); + + return await mediator.Send(request, cancellationToken); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportDestinationClient/AccessTokenProviderException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/AccessTokenProviderException.cs similarity index 
88% rename from src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportDestinationClient/AccessTokenProviderException.cs rename to src/Microsoft.Health.Fhir.Core/Features/Operations/AccessTokenProviderException.cs index a40b94b45f..85b486703b 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportDestinationClient/AccessTokenProviderException.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/AccessTokenProviderException.cs @@ -6,7 +6,7 @@ using System; using EnsureThat; -namespace Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient +namespace Microsoft.Health.Fhir.Core.Features.Operations { public class AccessTokenProviderException : Exception { diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportDestinationClient/IAccessTokenProvider.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/IAccessTokenProvider.cs similarity index 92% rename from src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportDestinationClient/IAccessTokenProvider.cs rename to src/Microsoft.Health.Fhir.Core/Features/Operations/IAccessTokenProvider.cs index a130e64f7a..f4b7aec814 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportDestinationClient/IAccessTokenProvider.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/IAccessTokenProvider.cs @@ -7,7 +7,7 @@ using System.Threading; using System.Threading.Tasks; -namespace Microsoft.Health.Fhir.Core.Features.Operations.Export.ExportDestinationClient +namespace Microsoft.Health.Fhir.Core.Features.Operations { public interface IAccessTokenProvider { diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/ICompressedRawResourceConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/ICompressedRawResourceConverter.cs new file mode 100644 index 0000000000..69f3e3d03d --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/ICompressedRawResourceConverter.cs @@ -0,0 +1,29 @@ +// 
------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.IO; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations +{ + /// + /// Handles converting raw resource strings and compressed streams. + /// + public interface ICompressedRawResourceConverter + { + /// + /// Read from compressed resource stream to string + /// + /// Compressed resource stream + public Task ReadCompressedRawResource(Stream compressedResourceStream); + + /// + /// Convert rawResource string to compressed stream + /// + /// Output steam for compressed data. + /// Input raw resource string. + public void WriteCompressedRawResource(Stream outputStream, string rawResource); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClient.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClient.cs new file mode 100644 index 0000000000..9902c6e828 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClient.cs @@ -0,0 +1,83 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations +{ + /// + /// Client for integration data store. 
+ /// + public interface IIntegrationDataStoreClient + { + /// + /// Download resource stream by location + /// + /// Resource URI + /// Start offset in the file + /// Cancellation Token + public Stream DownloadResource(Uri resourceUri, long startOffset, CancellationToken cancellationToken); + + /// + /// Prepare for new resource + /// + /// Container id for new resource file + /// Resource file name. + /// Cancellation Token. + public Task PrepareResourceAsync(string containerId, string fileName, CancellationToken cancellationToken); + + /// + /// Upload part of resource file in block. + /// + /// Resource URI. + /// Content stream. + /// Id for this block. + /// Cancellation token. + public Task UploadBlockAsync(Uri resourceUri, Stream stream, string blockId, CancellationToken cancellationToken); + + /// + /// Append new blocks to current resource file. + /// + /// Resource URI. + /// New block ids. + /// Cancellation token. + public Task AppendCommitAsync(Uri resourceUri, string[] blockIds, CancellationToken cancellationToken); + + /// + /// Commit all blocks in resource file. + /// + /// Resource URI + /// Block id list. + /// Cancellation Token. + public Task CommitAsync(Uri resourceUri, string[] blockIds, CancellationToken cancellationToken); + + /// + /// Get resource file properties. + /// + /// Resource URI. + /// Cancellation Token. + public Task> GetPropertiesAsync(Uri resourceUri, CancellationToken cancellationToken); + + /// + /// Try to acquire lease on resource file. + /// + /// Resource URI. + /// Proposed LeaseId. + /// Cancellation Token. + public Task TryAcquireLeaseAsync(Uri resourceUri, string proposedLeaseId, CancellationToken cancellationToken); + + /// + /// Try to release lease on resource file. + /// + /// Resource URI. + /// Lease id for the resource file. + /// Cancellation Token. 
+ public Task TryReleaseLeaseAsync(Uri resourceUri, string leaseId, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClientInitilizer.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClientInitilizer.cs new file mode 100644 index 0000000000..07ec763fad --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/IIntegrationDataStoreClientInitilizer.cs @@ -0,0 +1,29 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Health.Fhir.Core.Configs; + +namespace Microsoft.Health.Fhir.Core.Features.Operations +{ + public interface IIntegrationDataStoreClientInitilizer + { + /// + /// Used to get a client that is authorized to talk to the integration data store. + /// + /// Cancellation token. + /// A client of type T + Task GetAuthorizedClientAsync(CancellationToken cancellationToken); + + /// + /// Used to get a client that is authorized to talk to the integration data store. + /// + /// Integration dataStore configuration + /// Cancellation token. 
+ /// A client of type T + Task GetAuthorizedClientAsync(IntegrationDataStoreConfiguration integrationDataStoreConfiguration, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CancelImportRequestHandler.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CancelImportRequestHandler.cs new file mode 100644 index 0000000000..413e1015f3 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CancelImportRequestHandler.cs @@ -0,0 +1,65 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using MediatR; +using Microsoft.Extensions.Logging; +using Microsoft.Health.Core.Features.Security.Authorization; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Health.Fhir.Core.Features.Security; +using Microsoft.Health.Fhir.Core.Messages.Import; +using Microsoft.Health.TaskManagement; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class CancelImportRequestHandler : IRequestHandler + { + private readonly ITaskManager _taskManager; + private readonly IAuthorizationService _authorizationService; + private readonly ILogger _logger; + + public CancelImportRequestHandler(ITaskManager taskManager, IAuthorizationService authorizationService, ILogger logger) + { + EnsureArg.IsNotNull(taskManager, nameof(taskManager)); + EnsureArg.IsNotNull(authorizationService, nameof(authorizationService)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _taskManager = taskManager; + _authorizationService = authorizationService; + _logger = logger; + } + + 
public async Task Handle(CancelImportRequest request, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(request, nameof(request)); + + if (await _authorizationService.CheckAccess(DataActions.Import, cancellationToken) != DataActions.Import) + { + throw new UnauthorizedFhirActionException(); + } + + try + { + TaskInfo taskInfo = await _taskManager.CancelTaskAsync(request.TaskId, cancellationToken); + + if (taskInfo.Status == TaskManagement.TaskStatus.Completed) + { + throw new OperationFailedException(Resources.ImportOperationCompleted, HttpStatusCode.Conflict); + } + else + { + return new CancelImportResponse(HttpStatusCode.Accepted); + } + } + catch (TaskNotExistException) + { + throw new ResourceNotFoundException(string.Format(Resources.ImportTaskNotFound, request.TaskId)); + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CreateImportRequestHandler.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CreateImportRequestHandler.cs new file mode 100644 index 0000000000..61792f83f7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/CreateImportRequestHandler.cs @@ -0,0 +1,106 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using MediatR; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Core; +using Microsoft.Health.Core.Features.Security.Authorization; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Health.Fhir.Core.Features.Security; +using Microsoft.Health.Fhir.Core.Messages.Import; +using Microsoft.Health.TaskManagement; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// MediatR request handler. Called when the ImportController creates an Import job. + /// + public class CreateImportRequestHandler : IRequestHandler + { + private readonly ITaskManager _taskManager; + private readonly ImportTaskConfiguration _importTaskConfiguration; + private readonly TaskHostingConfiguration _taskHostingConfiguration; + private readonly ILogger _logger; + private readonly IAuthorizationService _authorizationService; + + public CreateImportRequestHandler( + ITaskManager taskManager, + IOptions operationsConfig, + IOptions taskHostingConfiguration, + ILogger logger, + IAuthorizationService authorizationService) + { + EnsureArg.IsNotNull(taskManager, nameof(taskManager)); + EnsureArg.IsNotNull(operationsConfig.Value, nameof(operationsConfig)); + EnsureArg.IsNotNull(taskHostingConfiguration.Value, nameof(taskHostingConfiguration)); + EnsureArg.IsNotNull(authorizationService, nameof(authorizationService)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _taskManager = taskManager; + _importTaskConfiguration = operationsConfig.Value.Import; + _taskHostingConfiguration = taskHostingConfiguration.Value; + _authorizationService = authorizationService; + _logger = logger; + } + + public async Task Handle(CreateImportRequest request, 
CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(request, nameof(request)); + + if (await _authorizationService.CheckAccess(DataActions.Import, cancellationToken) != DataActions.Import) + { + throw new UnauthorizedFhirActionException(); + } + + string taskId = Guid.NewGuid().ToString("N"); + + // Processing task might be dispatched to a different environment with a different queueid later. + string processingTaskQueueId = string.IsNullOrEmpty(_importTaskConfiguration.ProcessingTaskQueueId) ? _taskHostingConfiguration.QueueId : _importTaskConfiguration.ProcessingTaskQueueId; + ImportOrchestratorTaskInputData inputData = new ImportOrchestratorTaskInputData() + { + RequestUri = request.RequestUri, + BaseUri = new Uri(request.RequestUri.GetLeftPart(UriPartial.Authority)), + Input = request.Input, + InputFormat = request.InputFormat, + InputSource = request.InputSource, + StorageDetail = request.StorageDetail, + MaxConcurrentProcessingTaskCount = _importTaskConfiguration.MaxRunningProcessingTaskCount, + ProcessingTaskQueueId = processingTaskQueueId, + ProcessingTaskMaxRetryCount = _importTaskConfiguration.MaxRetryCount, + TaskId = taskId, + TaskCreateTime = Clock.UtcNow, + }; + + TaskInfo taskInfo = new TaskInfo() + { + TaskId = taskId, + TaskTypeId = ImportOrchestratorTask.ImportOrchestratorTaskId, + MaxRetryCount = _importTaskConfiguration.MaxRetryCount, + QueueId = _taskHostingConfiguration.QueueId, + InputData = JsonConvert.SerializeObject(inputData), + }; + + try + { + await _taskManager.CreateTaskAsync(taskInfo, true, cancellationToken); + } + catch (TaskConflictException) + { + _logger.LogInformation("Already a running import task."); + throw new OperationFailedException(Resources.ImportTaskIsRunning, HttpStatusCode.Conflict); + } + + return new CreateImportResponse(taskId); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs 
b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs new file mode 100644 index 0000000000..a54eba72c3 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs @@ -0,0 +1,84 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using MediatR; +using Microsoft.Health.Core.Features.Security.Authorization; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Health.Fhir.Core.Features.Security; +using Microsoft.Health.Fhir.Core.Messages.Import; +using Microsoft.Health.TaskManagement; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class GetImportRequestHandler : IRequestHandler + { + private readonly ITaskManager _taskManager; + private readonly IAuthorizationService _authorizationService; + + public GetImportRequestHandler(ITaskManager taskManager, IAuthorizationService authorizationService) + { + EnsureArg.IsNotNull(taskManager, nameof(taskManager)); + EnsureArg.IsNotNull(authorizationService, nameof(authorizationService)); + + _taskManager = taskManager; + _authorizationService = authorizationService; + } + + public async Task Handle(GetImportRequest request, CancellationToken cancellationToken) + { + EnsureArg.IsNotNull(request, nameof(request)); + + if (await _authorizationService.CheckAccess(DataActions.Import, cancellationToken) != DataActions.Import) + { + throw new UnauthorizedFhirActionException(); + } + + TaskInfo taskInfo = await _taskManager.GetTaskAsync(request.TaskId, cancellationToken); + + if (taskInfo == 
null) + { + throw new ResourceNotFoundException(string.Format(Resources.ImportTaskNotFound, request.TaskId)); + } + + if (taskInfo.Status != TaskManagement.TaskStatus.Completed) + { + if (taskInfo.IsCanceled) + { + throw new OperationFailedException(Resources.UserRequestedCancellation, HttpStatusCode.BadRequest); + } + + return new GetImportResponse(HttpStatusCode.Accepted); + } + else + { + TaskResultData resultData = JsonConvert.DeserializeObject(taskInfo.Result); + if (resultData.Result == TaskResult.Success) + { + ImportTaskResult result = JsonConvert.DeserializeObject(resultData.ResultData); + return new GetImportResponse(HttpStatusCode.OK, result); + } + else if (resultData.Result == TaskResult.Fail) + { + ImportTaskErrorResult errorResult = JsonConvert.DeserializeObject(resultData.ResultData); + + string failureReason = errorResult.ErrorMessage; + HttpStatusCode failureStatusCode = errorResult.HttpStatusCode; + + throw new OperationFailedException( + string.Format(Resources.OperationFailed, OperationsConstants.Import, failureReason), failureStatusCode); + } + else + { + throw new OperationFailedException(Resources.UserRequestedCancellation, HttpStatusCode.BadRequest); + } + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorSerializer.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorSerializer.cs new file mode 100644 index 0000000000..5b1c6b7669 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorSerializer.cs @@ -0,0 +1,31 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Serializer for error of import operation + /// + public interface IImportErrorSerializer + { + /// + /// Serialize import error into operation output. + /// + /// Error index in file. + /// Exception + /// Error in string format. + public string Serialize(long index, Exception ex); + + /// + /// Serialize import error into operation output. + /// + /// Error index in file. + /// Error Message + /// Error in string format. + public string Serialize(long index, string errorMessage); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStore.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStore.cs new file mode 100644 index 0000000000..3505a1ad92 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStore.cs @@ -0,0 +1,28 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Store for import error + /// + public interface IImportErrorStore + { + /// + /// Error file location. + /// + public string ErrorFileLocation { get; } + + /// + /// Upload import error to store. + /// + /// Import errors in string format. + /// Cancellation token. 
+ public Task UploadErrorsAsync(string[] importErrors, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStoreFactory.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStoreFactory.cs new file mode 100644 index 0000000000..785863e8b0 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportErrorStoreFactory.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Factory for import error store. + /// + public interface IImportErrorStoreFactory + { + /// + /// Initialize error store. + /// + /// Error file name. + /// Cancellation Token. + public Task InitializeAsync(string fileName, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportOrchestratorTaskDataStoreOperation.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportOrchestratorTaskDataStoreOperation.cs new file mode 100644 index 0000000000..d575ac247b --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportOrchestratorTaskDataStoreOperation.cs @@ -0,0 +1,25 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public interface IImportOrchestratorTaskDataStoreOperation + { + /// + /// Pre-process before import operation. + /// + /// Cancellation Token + public Task PreprocessAsync(CancellationToken cancellationToken); + + /// + /// Post-process after import operation. + /// + /// Cancellation Token + public Task PostprocessAsync(CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceLoader.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceLoader.cs new file mode 100644 index 0000000000..96a47adef9 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceLoader.cs @@ -0,0 +1,28 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Loader for import resource + /// + public interface IImportResourceLoader + { + /// + /// Load import resource to channel. + /// + /// resource location + /// start index in resource file. + /// FHIR resource type. + /// Sequence id generator. + /// Cancellation Token. 
+ public (Channel resourceChannel, Task loadTask) LoadResources(string resourceLocation, long startIndex, string resourceType, Func sequenceIdGenerator, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceParser.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceParser.cs new file mode 100644 index 0000000000..beaeda7b54 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IImportResourceParser.cs @@ -0,0 +1,22 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Parser for raw data into ImportResource. + /// + public interface IImportResourceParser + { + /// + /// Parse raw resource data. + /// + /// sequence id of the resource. + /// index of the resource. + /// raw content in string format. + /// ImportResource + public ImportResource Parse(long id, long index, string rawContent); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceBulkImporter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceBulkImporter.cs new file mode 100644 index 0000000000..22c07f0abc --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceBulkImporter.cs @@ -0,0 +1,33 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Importer for ImportResource into data store. + /// + public interface IResourceBulkImporter + { + /// + /// Import resource into data store. + /// + /// Input channel for resource. + /// Import error store. + /// Cancellation Token. + public (Channel progressChannel, Task importTask) Import(Channel inputChannel, IImportErrorStore importErrorStore, CancellationToken cancellationToken); + + /// + /// Initialize import + /// + /// Import processing task input data. + /// Import processing task progress. + /// Cancellation Token. + public Task CleanResourceAsync(ImportProcessingTaskInputData inputData, ImportProcessingProgress progress, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceMetaPopulator.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceMetaPopulator.cs new file mode 100644 index 0000000000..a9d3dc2400 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/IResourceMetaPopulator.cs @@ -0,0 +1,22 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using Hl7.Fhir.Model; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Populate resource with meta content. + /// + public interface IResourceMetaPopulator + { + /// + /// Populate meta content. + /// + /// sequence id of the resource. + /// resource. 
+ public void Populate(long id, Resource resource); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ISequenceIdGenerator.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ISequenceIdGenerator.cs new file mode 100644 index 0000000000..253aaa0259 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ISequenceIdGenerator.cs @@ -0,0 +1,20 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Generator for sequence id. + /// + /// Sequence id for type T. + public interface ISequenceIdGenerator + { + /// + /// Get current sequence id. + /// + /// Sequence id for type T. + T GetCurrentSequenceId(); + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportConstants.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportConstants.cs new file mode 100644 index 0000000000..d859b43947 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportConstants.cs @@ -0,0 +1,12 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public static class ImportConstants + { + public const string InitialLoadMode = "InitialLoad"; + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportError.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportError.cs new file mode 100644 index 0000000000..7747c3334b --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportError.cs @@ -0,0 +1,34 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportError + { + public ImportError(long id, long index, Exception exception) + { + Id = id; + Index = index; + Exception = exception; + } + + /// + /// Sequence ID for resource + /// + public long Id { get; set; } + + /// + /// Index in the resource file. + /// + public long Index { get; set; } + + /// + /// Exception during processing data. + /// + public Exception Exception { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs new file mode 100644 index 0000000000..b4dbe642cb --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs @@ -0,0 +1,62 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.IO; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportErrorStore : IImportErrorStore + { + private IIntegrationDataStoreClient _integrationDataStoreClient; + private Uri _fileUri; + private RecyclableMemoryStreamManager _recyclableMemoryStreamManager; + + public ImportErrorStore(IIntegrationDataStoreClient integrationDataStoreClient, Uri fileUri) + { + EnsureArg.IsNotNull(integrationDataStoreClient, nameof(integrationDataStoreClient)); + EnsureArg.IsNotNull(fileUri, nameof(fileUri)); + + _integrationDataStoreClient = integrationDataStoreClient; + _fileUri = fileUri; + + _recyclableMemoryStreamManager = new RecyclableMemoryStreamManager(); + } + + public string ErrorFileLocation => _fileUri.ToString(); + + /// + /// Upload error logs to store. Append to the existing error file. 
+ /// + /// New import errors + /// Cancellation Token + public async Task UploadErrorsAsync(string[] importErrors, CancellationToken cancellationToken) + { + if (importErrors == null || importErrors.Length == 0) + { + return; + } + + using var stream = new RecyclableMemoryStream(_recyclableMemoryStreamManager); + using StreamWriter writer = new StreamWriter(stream); + + foreach (string error in importErrors) + { + await writer.WriteLineAsync(error); + } + + await writer.FlushAsync(); + stream.Position = 0; + + string blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray()); + await _integrationDataStoreClient.UploadBlockAsync(_fileUri, stream, blockId, cancellationToken); + await _integrationDataStoreClient.AppendCommitAsync(_fileUri, new string[] { blockId }, cancellationToken); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStoreFactory.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStoreFactory.cs new file mode 100644 index 0000000000..ccdea05f19 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStoreFactory.cs @@ -0,0 +1,34 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportErrorStoreFactory : IImportErrorStoreFactory + { + private const string LogContainerName = "fhirlogs"; + + private IIntegrationDataStoreClient _integrationDataStoreClient; + + public ImportErrorStoreFactory(IIntegrationDataStoreClient integrationDataStoreClient) + { + EnsureArg.IsNotNull(integrationDataStoreClient, nameof(integrationDataStoreClient)); + + _integrationDataStoreClient = integrationDataStoreClient; + } + + public async Task InitializeAsync(string fileName, CancellationToken cancellationToken) + { + EnsureArg.IsNotNullOrEmpty(fileName, nameof(fileName)); + + Uri fileUri = await _integrationDataStoreClient.PrepareResourceAsync(LogContainerName, fileName, cancellationToken); + return new ImportErrorStore(_integrationDataStoreClient, fileUri); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFailedOperationOutcome.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFailedOperationOutcome.cs new file mode 100644 index 0000000000..e4c664259a --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFailedOperationOutcome.cs @@ -0,0 +1,39 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportFailedOperationOutcome + { + /// + /// Resource Type + /// + [JsonProperty("type")] + public string Type { get; set; } + + /// + /// Processing resource count. + /// + [JsonProperty("count")] + public long Count { get; set; } + + /// + /// Input resource url. + /// + [JsonProperty("inputUrl")] + public Uri InputUrl { get; set; } + + /// + /// Extension detail file. + /// +#pragma warning disable CA1056 // URI-like properties should not be strings + [JsonProperty("url")] + public string Url { get; set; } +#pragma warning restore CA1056 // URI-like properties should not be strings + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs new file mode 100644 index 0000000000..c278a4a0bb --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs @@ -0,0 +1,24 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Diagnostics; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportFileEtagNotMatchException : Exception + { + public ImportFileEtagNotMatchException(string message) + : base(message, null) + { + } + + public ImportFileEtagNotMatchException(string message, Exception innerException) + : base(message, innerException) + { + Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty."); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOperationOutcome.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOperationOutcome.cs new file mode 100644 index 0000000000..01f9314341 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOperationOutcome.cs @@ -0,0 +1,31 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportOperationOutcome + { + /// + /// Resource Type + /// + [JsonProperty("type")] + public string Type { get; set; } + + /// + /// Processing resource count. + /// + [JsonProperty("count")] + public long Count { get; set; } + + /// + /// Input resource url. 
+ /// + [JsonProperty("inputUrl")] + public Uri InputUrl { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTask.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTask.cs new file mode 100644 index 0000000000..50be3b1860 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTask.cs @@ -0,0 +1,518 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Primitives; +using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.TaskManagement; +using Newtonsoft.Json; +using TaskStatus = Microsoft.Health.TaskManagement.TaskStatus; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportOrchestratorTask : ITask + { + public const short ImportOrchestratorTaskId = 2; + + private const int DefaultPollingFrequencyInSeconds = 3; + private const long DefaultResourceSizePerByte = 64; + + private ImportOrchestratorTaskInputData _orchestratorInputData; + private RequestContextAccessor _contextAccessor; + private ImportOrchestratorTaskContext _orchestratorTaskContext; + private ITaskManager _taskManager; + private ISequenceIdGenerator _sequenceIdGenerator; + private IImportOrchestratorTaskDataStoreOperation _importOrchestratorTaskDataStoreOperation; + private IContextUpdater _contextUpdater; + private ILogger _logger; + private 
IIntegrationDataStoreClient _integrationDataStoreClient; + private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource(); + private List<(Uri resourceUri, TaskInfo taskInfo)> _runningTasks = new List<(Uri resourceUri, TaskInfo taskInfo)>(); + + public ImportOrchestratorTask( + ImportOrchestratorTaskInputData orchestratorInputData, + ImportOrchestratorTaskContext orchestratorTaskContext, + ITaskManager taskManager, + ISequenceIdGenerator sequenceIdGenerator, + IContextUpdater contextUpdater, + RequestContextAccessor contextAccessor, + IImportOrchestratorTaskDataStoreOperation importOrchestratorTaskDataStoreOperation, + IIntegrationDataStoreClient integrationDataStoreClient, + ILoggerFactory loggerFactory) + { + EnsureArg.IsNotNull(orchestratorInputData, nameof(orchestratorInputData)); + EnsureArg.IsNotNull(orchestratorTaskContext, nameof(orchestratorTaskContext)); + EnsureArg.IsNotNull(taskManager, nameof(taskManager)); + EnsureArg.IsNotNull(sequenceIdGenerator, nameof(sequenceIdGenerator)); + EnsureArg.IsNotNull(contextUpdater, nameof(contextUpdater)); + EnsureArg.IsNotNull(contextAccessor, nameof(contextAccessor)); + EnsureArg.IsNotNull(importOrchestratorTaskDataStoreOperation, nameof(importOrchestratorTaskDataStoreOperation)); + EnsureArg.IsNotNull(integrationDataStoreClient, nameof(integrationDataStoreClient)); + EnsureArg.IsNotNull(loggerFactory, nameof(loggerFactory)); + + _orchestratorInputData = orchestratorInputData; + _orchestratorTaskContext = orchestratorTaskContext; + _taskManager = taskManager; + _sequenceIdGenerator = sequenceIdGenerator; + _contextUpdater = contextUpdater; + _contextAccessor = contextAccessor; + _importOrchestratorTaskDataStoreOperation = importOrchestratorTaskDataStoreOperation; + _integrationDataStoreClient = integrationDataStoreClient; + _logger = loggerFactory.CreateLogger(); + } + + public string RunId { get; set; } + + public int PollingFrequencyInSeconds { get; set; } = 
DefaultPollingFrequencyInSeconds; + + public async Task ExecuteAsync() + { + var fhirRequestContext = new FhirRequestContext( + method: "Import", + uriString: _orchestratorInputData.RequestUri.ToString(), + baseUriString: _orchestratorInputData.BaseUri.ToString(), + correlationId: _orchestratorInputData.TaskId, + requestHeaders: new Dictionary(), + responseHeaders: new Dictionary()) + { + IsBackgroundTask = true, + }; + + _contextAccessor.RequestContext = fhirRequestContext; + + CancellationToken cancellationToken = _cancellationTokenSource.Token; + + TaskResultData taskResultData = null; + ImportTaskErrorResult errorResult = null; + try + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + if (_orchestratorTaskContext.Progress == ImportOrchestratorTaskProgress.Initialized) + { + await ValidateResourcesAsync(cancellationToken); + + _orchestratorTaskContext.Progress = ImportOrchestratorTaskProgress.InputResourcesValidated; + await UpdateProgressAsync(_orchestratorTaskContext, cancellationToken); + + _logger.LogInformation("Input Resources Validated"); + } + + if (_orchestratorTaskContext.Progress == ImportOrchestratorTaskProgress.InputResourcesValidated) + { + await _importOrchestratorTaskDataStoreOperation.PreprocessAsync(cancellationToken); + + _orchestratorTaskContext.Progress = ImportOrchestratorTaskProgress.PreprocessCompleted; + await UpdateProgressAsync(_orchestratorTaskContext, cancellationToken); + + _logger.LogInformation("Preprocess Completed"); + } + + if (_orchestratorTaskContext.Progress == ImportOrchestratorTaskProgress.PreprocessCompleted) + { + _orchestratorTaskContext.DataProcessingTasks = await GenerateSubTaskRecordsAsync(cancellationToken); + _orchestratorTaskContext.Progress = ImportOrchestratorTaskProgress.SubTaskRecordsGenerated; + await UpdateProgressAsync(_orchestratorTaskContext, cancellationToken); + + _logger.LogInformation("SubTask Records Generated"); + } + + if 
(_orchestratorTaskContext.Progress == ImportOrchestratorTaskProgress.SubTaskRecordsGenerated) + { + _orchestratorTaskContext.ImportResult = await ExecuteDataProcessingTasksAsync(cancellationToken); + + _orchestratorTaskContext.Progress = ImportOrchestratorTaskProgress.SubTasksCompleted; + await UpdateProgressAsync(_orchestratorTaskContext, cancellationToken); + + _logger.LogInformation("SubTasks Completed"); + } + + _orchestratorTaskContext.ImportResult.TransactionTime = _orchestratorInputData.TaskCreateTime; + } + catch (TaskCanceledException taskCanceledEx) + { + _logger.LogInformation(taskCanceledEx, "Import task canceled. {0}", taskCanceledEx.Message); + + await CancelProcessingTasksAsync(); + taskResultData = new TaskResultData(TaskResult.Canceled, taskCanceledEx.Message); + } + catch (OperationCanceledException canceledEx) + { + _logger.LogInformation(canceledEx, "Import task canceled. {0}", canceledEx.Message); + + await CancelProcessingTasksAsync(); + taskResultData = new TaskResultData(TaskResult.Canceled, canceledEx.Message); + } + catch (IntegrationDataStoreException integrationDataStoreEx) + { + _logger.LogInformation(integrationDataStoreEx, "Failed to access input files."); + + errorResult = new ImportTaskErrorResult() + { + HttpStatusCode = integrationDataStoreEx.StatusCode, + ErrorMessage = integrationDataStoreEx.Message, + }; + + taskResultData = new TaskResultData(TaskResult.Fail, JsonConvert.SerializeObject(errorResult)); + } + catch (ImportFileEtagNotMatchException eTagEx) + { + _logger.LogInformation(eTagEx, "Import file etag not match."); + + errorResult = new ImportTaskErrorResult() + { + HttpStatusCode = HttpStatusCode.BadRequest, + ErrorMessage = eTagEx.Message, + }; + + taskResultData = new TaskResultData(TaskResult.Fail, JsonConvert.SerializeObject(errorResult)); + } + catch (ImportProcessingException processingEx) + { + _logger.LogInformation(processingEx, "Failed to process input resources."); + + errorResult = new 
ImportTaskErrorResult() + { + HttpStatusCode = HttpStatusCode.BadRequest, + ErrorMessage = processingEx.Message, + }; + + taskResultData = new TaskResultData(TaskResult.Fail, JsonConvert.SerializeObject(errorResult)); + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Failed to import data."); + + errorResult = new ImportTaskErrorResult() + { + HttpStatusCode = HttpStatusCode.InternalServerError, + ErrorMessage = ex.Message, + }; + + throw new RetriableTaskException(JsonConvert.SerializeObject(errorResult)); + } + + if (_orchestratorTaskContext.Progress > ImportOrchestratorTaskProgress.InputResourcesValidated) + { + // Post-process operation cannot be cancelled. + try + { + await _importOrchestratorTaskDataStoreOperation.PostprocessAsync(CancellationToken.None); + + _logger.LogInformation("Postprocess Completed"); + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Failed at postprocess step."); + + ImportTaskErrorResult postProcessEerrorResult = new ImportTaskErrorResult() + { + HttpStatusCode = HttpStatusCode.InternalServerError, + ErrorMessage = ex.Message, + + // other error if any. 
+ InnerError = errorResult, + }; + + throw new RetriableTaskException(JsonConvert.SerializeObject(postProcessEerrorResult)); + } + } + + if (taskResultData == null) // No exception + { + taskResultData = new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(_orchestratorTaskContext.ImportResult)); + } + + return taskResultData; + } + + public void Cancel() + { + if (_cancellationTokenSource != null && !_cancellationTokenSource.IsCancellationRequested) + { + _cancellationTokenSource?.Cancel(); + } + } + + private static long CalculateResourceNumberByResourceSize(long blobSizeInBytes, long resourceCountPerBytes) + { + return Math.Max((blobSizeInBytes / resourceCountPerBytes) + 1, 10000L); + } + + private async Task ValidateResourcesAsync(CancellationToken cancellationToken) + { + foreach (var input in _orchestratorInputData.Input) + { + Dictionary properties = await _integrationDataStoreClient.GetPropertiesAsync(input.Url, cancellationToken); + if (!string.IsNullOrEmpty(input.Etag)) + { + if (!input.Etag.Equals(properties[IntegrationDataStoreClientConstants.BlobPropertyETag])) + { + throw new ImportFileEtagNotMatchException(string.Format("Input file Etag not match. 
{0}", input.Url)); + } + } + } + } + + private async Task UpdateProgressAsync(ImportOrchestratorTaskContext context, CancellationToken cancellationToken) + { + await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(context), cancellationToken); + } + + private async Task> GenerateSubTaskRecordsAsync(CancellationToken cancellationToken) + { + Dictionary result = new Dictionary(); + + long beginSequenceId = _sequenceIdGenerator.GetCurrentSequenceId(); + + foreach (var input in _orchestratorInputData.Input) + { + string taskId = Guid.NewGuid().ToString("N"); + + Dictionary properties = await _integrationDataStoreClient.GetPropertiesAsync(input.Url, cancellationToken); + long blobSizeInBytes = (long)properties[IntegrationDataStoreClientConstants.BlobPropertyLength]; + long estimatedResourceNumber = CalculateResourceNumberByResourceSize(blobSizeInBytes, DefaultResourceSizePerByte); + long endSequenceId = beginSequenceId + estimatedResourceNumber; + + ImportProcessingTaskInputData importTaskPayload = new ImportProcessingTaskInputData() + { + ResourceLocation = input.Url.ToString(), + UriString = _orchestratorInputData.RequestUri.ToString(), + BaseUriString = _orchestratorInputData.BaseUri.ToString(), + ResourceType = input.Type, + TaskId = taskId, + BeginSequenceId = beginSequenceId, + EndSequenceId = endSequenceId, + }; + + TaskInfo processingTask = new TaskInfo() + { + QueueId = _orchestratorInputData.ProcessingTaskQueueId, + TaskId = taskId, + TaskTypeId = ImportProcessingTask.ImportProcessingTaskId, + InputData = JsonConvert.SerializeObject(importTaskPayload), + MaxRetryCount = _orchestratorInputData.ProcessingTaskMaxRetryCount, + }; + + result[input.Url] = processingTask; + + beginSequenceId = endSequenceId; + } + + return result; + } + + private async Task ExecuteDataProcessingTasksAsync(CancellationToken cancellationToken) + { + List completedOperationOutcome = new List(); + List failedOperationOutcome = new List(); + + foreach ((Uri resourceUri, 
TaskInfo taskInfo) in _orchestratorTaskContext.DataProcessingTasks.ToArray()) + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + while (_runningTasks.Count >= _orchestratorInputData.MaxConcurrentProcessingTaskCount) + { + List completedTaskResourceUris = await MonitorRunningTasksAsync(_runningTasks, cancellationToken); + + if (completedTaskResourceUris.Count > 0) + { + AddToResult(completedOperationOutcome, failedOperationOutcome, completedTaskResourceUris); + + _runningTasks.RemoveAll(t => completedTaskResourceUris.Contains(t.resourceUri)); + await UpdateProgressAsync(_orchestratorTaskContext, cancellationToken); + } + else + { + await Task.Delay(TimeSpan.FromSeconds(PollingFrequencyInSeconds), cancellationToken); + } + } + + TaskInfo taskInfoFromServer = await _taskManager.GetTaskAsync(taskInfo.TaskId, cancellationToken); + if (taskInfoFromServer == null) + { + taskInfoFromServer = await _taskManager.CreateTaskAsync(taskInfo, false, cancellationToken); + } + + _orchestratorTaskContext.DataProcessingTasks[resourceUri] = taskInfoFromServer; + if (taskInfoFromServer.Status != TaskStatus.Completed) + { + _runningTasks.Add((resourceUri, taskInfoFromServer)); + await Task.Delay(TimeSpan.FromSeconds(PollingFrequencyInSeconds), cancellationToken); + } + else + { + AddToResult(completedOperationOutcome, failedOperationOutcome, new List() { resourceUri }); + } + } + + while (_runningTasks.Count > 0) + { + List completedTaskResourceUris = await MonitorRunningTasksAsync(_runningTasks, cancellationToken); + + if (completedTaskResourceUris.Count > 0) + { + AddToResult(completedOperationOutcome, failedOperationOutcome, completedTaskResourceUris); + + _runningTasks.RemoveAll(t => completedTaskResourceUris.Contains(t.resourceUri)); + await UpdateProgressAsync(_orchestratorTaskContext, cancellationToken); + } + else + { + await Task.Delay(TimeSpan.FromSeconds(PollingFrequencyInSeconds), cancellationToken); + } + } + + return 
new ImportTaskResult() + { + Request = _orchestratorInputData.RequestUri.ToString(), + Output = completedOperationOutcome, + Error = failedOperationOutcome, + }; + } + + private void AddToResult(List completedOperationOutcome, List failedOperationOutcome, List completedTaskResourceUris) + { + foreach (Uri completedResourceUri in completedTaskResourceUris) + { + TaskInfo completeTaskInfo = _orchestratorTaskContext.DataProcessingTasks[completedResourceUri]; + TaskResultData taskResultData = JsonConvert.DeserializeObject(completeTaskInfo.Result); + + if (taskResultData.Result == TaskResult.Success) + { + ImportProcessingTaskResult procesingTaskResult = JsonConvert.DeserializeObject(taskResultData.ResultData); + completedOperationOutcome.Add(new ImportOperationOutcome() { Type = procesingTaskResult.ResourceType, Count = procesingTaskResult.SucceedCount, InputUrl = completedResourceUri }); + if (procesingTaskResult.FailedCount > 0) + { + failedOperationOutcome.Add(new ImportFailedOperationOutcome() { Type = procesingTaskResult.ResourceType, Count = procesingTaskResult.FailedCount, InputUrl = completedResourceUri, Url = procesingTaskResult.ErrorLogLocation }); + } + } + else if (taskResultData.Result == TaskResult.Fail) + { + throw new ImportProcessingException(string.Format("Failed to process file: {0}. 
{1}", completedResourceUri, taskResultData)); + } + else if (taskResultData.Result == TaskResult.Canceled) + { + throw new OperationCanceledException(taskResultData.ResultData); + } + } + } + + private async Task> MonitorRunningTasksAsync(List<(Uri resourceUri, TaskInfo taskInfo)> runningTasks, CancellationToken cancellationToken) + { + List completedTaskResourceUris = new List(); + + foreach ((Uri runningResourceUri, TaskInfo runningTaskInfo) in runningTasks) + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + TaskInfo latestTaskInfo = await _taskManager.GetTaskAsync(runningTaskInfo.TaskId, cancellationToken); + + _orchestratorTaskContext.DataProcessingTasks[runningResourceUri] = latestTaskInfo; + if (latestTaskInfo.Status == TaskStatus.Completed) + { + completedTaskResourceUris.Add(runningResourceUri); + } + } + + return completedTaskResourceUris; + } + + private async Task CancelProcessingTasksAsync() + { + List runningTaskIds = new List(); + + if ((_orchestratorTaskContext?.DataProcessingTasks?.Count ?? 0) == 0) + { + // No data processing task created. 
+ return; + } + + foreach (TaskInfo taskInfo in _orchestratorTaskContext.DataProcessingTasks.Values) + { + try + { + TaskInfo taskInfoFromServer = await _taskManager.GetTaskAsync(taskInfo.TaskId, CancellationToken.None); + + if (taskInfoFromServer != null) + { + await _taskManager.CancelTaskAsync(taskInfo.TaskId, CancellationToken.None); + runningTaskIds.Add(taskInfo.TaskId); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "failed to cancel task {0}", taskInfo.TaskId); + } + } + + // Wait task cancel for WaitRunningTaskCancelTimeoutInSec + await WaitRunningTaskCompleteAsync(runningTaskIds); + } + + private async Task WaitRunningTaskCompleteAsync(List runningTaskIds) + { + while (true) + { + if (runningTaskIds.Count == 0) + { + break; + } + + string[] currentRunningTaskIds = runningTaskIds.ToArray(); + + foreach (string runningTaskId in currentRunningTaskIds) + { + try + { + TaskInfo taskInfo = await _taskManager.GetTaskAsync(runningTaskId, CancellationToken.None); + if (taskInfo == null || taskInfo.Status == TaskStatus.Completed) + { + runningTaskIds.Remove(runningTaskId); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to get task info for canceled task {0}", runningTaskId); + } + } + + await Task.Delay(TimeSpan.FromSeconds(5)); + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + _cancellationTokenSource?.Dispose(); + _cancellationTokenSource = null; + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskContext.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskContext.cs new file mode 100644 index 0000000000..bbd0d7b21d --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskContext.cs @@ -0,0 +1,31 @@ +// 
------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Microsoft.Health.TaskManagement; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportOrchestratorTaskContext + { + /// + /// Data processing task records. + /// +#pragma warning disable CA2227 // Need to update status during execution. + public IDictionary DataProcessingTasks { get; set; } = new Dictionary(); +#pragma warning restore CA2227 + + /// + /// Orchestrator task progress. + /// + public ImportOrchestratorTaskProgress Progress { get; set; } + + /// + /// Import result during execution + /// + public ImportTaskResult ImportResult { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskInputData.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskInputData.cs new file mode 100644 index 0000000000..cbff2fd90f --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskInputData.cs @@ -0,0 +1,72 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Import task input payload + /// + public class ImportOrchestratorTaskInputData + { + /// + /// Request Uri for the import operation + /// + public Uri RequestUri { get; set; } + + /// + /// Input format for the input resource: ndjson supported. + /// + public string InputFormat { get; set; } + + /// + /// Input source for the operation. + /// + public Uri InputSource { get; set; } + + /// + /// FHIR Base Uri + /// + public Uri BaseUri { get; set; } + + /// + /// Task id for the orchestrator task. + /// + public string TaskId { get; set; } + + /// + /// Input resource list + /// + public IReadOnlyList Input { get; set; } + + /// + /// Resource storage details. + /// + public ImportRequestStorageDetail StorageDetail { get; set; } + + /// + /// Max running sub data processing task count at the same time. + /// + public int MaxConcurrentProcessingTaskCount { get; set; } + + /// + /// Max retry count for processing task + /// + public short? ProcessingTaskMaxRetryCount { get; set; } + + /// + /// Sub processing task queue id. + /// + public string ProcessingTaskQueueId { get; set; } + + /// + /// Task create time. 
+ /// + public DateTimeOffset TaskCreateTime { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskProgress.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskProgress.cs new file mode 100644 index 0000000000..4170810d2c --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorTaskProgress.cs @@ -0,0 +1,20 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + /// + /// Import orchestrator task progress enums. + /// + public enum ImportOrchestratorTaskProgress + { + Initialized, + InputResourcesValidated, + PreprocessCompleted, + SubTaskRecordsGenerated, + SubTasksCompleted, + PostprocessCompleted, + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs new file mode 100644 index 0000000000..1862f9fbbe --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs @@ -0,0 +1,24 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Diagnostics; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportProcessingException : Exception + { + public ImportProcessingException(string message) + : this(message, null) + { + } + + public ImportProcessingException(string message, Exception innerException) + : base(message, innerException) + { + Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty."); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingProgress.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingProgress.cs new file mode 100644 index 0000000000..e914c08170 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingProgress.cs @@ -0,0 +1,30 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportProcessingProgress + { + /// + /// Succeed import resource count + /// + public long SucceedImportCount { get; set; } + + /// + /// Failed processing resource count + /// + public long FailedImportCount { get; set; } + + /// + /// Current index for last checkpoint + /// + public long CurrentIndex { get; set; } + + /// + /// Importer initialized status + /// + public bool NeedCleanData { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTask.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTask.cs new file mode 100644 index 0000000000..05f8611b5d --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTask.cs @@ -0,0 +1,268 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Primitives; +using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.TaskManagement; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportProcessingTask : ITask + { + public const short ImportProcessingTaskId = 1; + + private ImportProcessingTaskInputData _inputData; + private ImportProcessingProgress _importProgress; + private IImportResourceLoader _importResourceLoader; + private IResourceBulkImporter _resourceBulkImporter; + private IImportErrorStoreFactory _importErrorStoreFactory; + private IContextUpdater _contextUpdater; + private RequestContextAccessor _contextAccessor; + private ILogger _logger; + private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource(); + + public ImportProcessingTask( + ImportProcessingTaskInputData inputData, + ImportProcessingProgress importProgress, + IImportResourceLoader importResourceLoader, + IResourceBulkImporter resourceBulkImporter, + IImportErrorStoreFactory importErrorStoreFactory, + IContextUpdater contextUpdater, + RequestContextAccessor contextAccessor, + ILoggerFactory loggerFactory) + { + EnsureArg.IsNotNull(inputData, nameof(inputData)); + EnsureArg.IsNotNull(importProgress, nameof(importProgress)); + EnsureArg.IsNotNull(importResourceLoader, nameof(importResourceLoader)); + EnsureArg.IsNotNull(resourceBulkImporter, nameof(resourceBulkImporter)); + EnsureArg.IsNotNull(importErrorStoreFactory, nameof(importErrorStoreFactory)); + EnsureArg.IsNotNull(contextUpdater, nameof(contextUpdater)); + EnsureArg.IsNotNull(contextAccessor, 
nameof(contextAccessor)); + EnsureArg.IsNotNull(loggerFactory, nameof(loggerFactory)); + + _inputData = inputData; + _importProgress = importProgress; + _importResourceLoader = importResourceLoader; + _resourceBulkImporter = resourceBulkImporter; + _importErrorStoreFactory = importErrorStoreFactory; + _contextUpdater = contextUpdater; + _contextAccessor = contextAccessor; + + _logger = loggerFactory.CreateLogger(); + } + + public string RunId { get; set; } + + public async Task ExecuteAsync() + { + var fhirRequestContext = new FhirRequestContext( + method: "Import", + uriString: _inputData.UriString, + baseUriString: _inputData.BaseUriString, + correlationId: _inputData.TaskId, + requestHeaders: new Dictionary(), + responseHeaders: new Dictionary()) + { + IsBackgroundTask = true, + }; + + _contextAccessor.RequestContext = fhirRequestContext; + + CancellationToken cancellationToken = _cancellationTokenSource.Token; + + long succeedImportCount = _importProgress.SucceedImportCount; + long failedImportCount = _importProgress.FailedImportCount; + + ImportProcessingTaskResult result = new ImportProcessingTaskResult(); + result.ResourceType = _inputData.ResourceType; + + try + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + Func sequenceIdGenerator = (index) => _inputData.BeginSequenceId + index; + + // Clean resources before import start + await _resourceBulkImporter.CleanResourceAsync(_inputData, _importProgress, cancellationToken); + _importProgress.NeedCleanData = true; + await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(_importProgress), cancellationToken); + + // Initialize error store + IImportErrorStore importErrorStore = await _importErrorStoreFactory.InitializeAsync(GetErrorFileName(), cancellationToken); + result.ErrorLogLocation = importErrorStore.ErrorFileLocation; + + // Load and parse resource from bulk resource + (Channel importResourceChannel, Task loadTask) = 
_importResourceLoader.LoadResources(_inputData.ResourceLocation, _importProgress.CurrentIndex, _inputData.ResourceType, sequenceIdGenerator, cancellationToken); + + // Import to data store + (Channel progressChannel, Task importTask) = _resourceBulkImporter.Import(importResourceChannel, importErrorStore, cancellationToken); + + // Update progress for checkpoints + await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync()) + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException("Import task is canceled by user."); + } + + _importProgress.SucceedImportCount = progress.SucceedImportCount + succeedImportCount; + _importProgress.FailedImportCount = progress.FailedImportCount + failedImportCount; + _importProgress.CurrentIndex = progress.CurrentIndex; + result.SucceedCount = _importProgress.SucceedImportCount; + result.FailedCount = _importProgress.FailedImportCount; + + _logger.LogInformation("Import task progress: {0}", JsonConvert.SerializeObject(_importProgress)); + + try + { + await _contextUpdater.UpdateContextAsync(JsonConvert.SerializeObject(_importProgress), cancellationToken); + } + catch (Exception ex) + { + // ignore exception for progress update + _logger.LogInformation(ex, "Failed to update context."); + } + } + + // Pop up exception during load & import + // Put import task before load task for resource channel full and blocking issue. 
+ try + { + await importTask; + } + catch (TaskCanceledException) + { + throw; + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to import data."); + throw new RetriableTaskException("Failed to import data.", ex); + } + + try + { + await loadTask; + } + catch (TaskCanceledException) + { + throw; + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to load data."); + throw new RetriableTaskException("Failed to load data", ex); + } + + return new TaskResultData(TaskResult.Success, JsonConvert.SerializeObject(result)); + } + catch (TaskCanceledException canceledEx) + { + _logger.LogInformation(canceledEx, "Data processing task is canceled."); + + await CleanResourceForFailureAsync(canceledEx); + + return new TaskResultData(TaskResult.Canceled, JsonConvert.SerializeObject(result)); + } + catch (OperationCanceledException canceledEx) + { + _logger.LogInformation(canceledEx, "Data processing task is canceled."); + + await CleanResourceForFailureAsync(canceledEx); + + return new TaskResultData(TaskResult.Canceled, JsonConvert.SerializeObject(result)); + } + catch (RetriableTaskException retriableEx) + { + _logger.LogInformation(retriableEx, "Error in data processing task."); + + await CleanResourceForFailureAsync(retriableEx); + + throw; + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Critical error in data processing task."); + + await CleanResourceForFailureAsync(ex); + + throw new RetriableTaskException(ex.Message); + } + finally + { + if (!_cancellationTokenSource.IsCancellationRequested) + { + _cancellationTokenSource.Cancel(); + } + } + } + + private async Task CleanResourceForFailureAsync(Exception failureException) + { + try + { + await _resourceBulkImporter.CleanResourceAsync(_inputData, _importProgress, CancellationToken.None); + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Data processing task is canceled. 
Failed to clean resource."); + throw new RetriableTaskException(ex.Message, failureException); + } + } + + public void Cancel() + { + if (_cancellationTokenSource != null && !_cancellationTokenSource.IsCancellationRequested) + { + _cancellationTokenSource?.Cancel(); + } + } + + public bool IsCancelling() + { + return _cancellationTokenSource?.IsCancellationRequested ?? false; + } + + private string GetErrorFileName() + { + return $"{_inputData.ResourceType}{_inputData.TaskId}.ndjson"; + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + _cancellationTokenSource?.Dispose(); + _cancellationTokenSource = null; + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskInputData.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskInputData.cs new file mode 100644 index 0000000000..91c4e7c075 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskInputData.cs @@ -0,0 +1,49 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportProcessingTaskInputData + { + /// + /// Resource location for the input file + /// + public string ResourceLocation { get; set; } + + /// + /// Request Uri string for the import operation + /// +#pragma warning disable CA1056 + public string UriString { get; set; } +#pragma warning restore CA1056 + + /// + /// FHIR base uri string. 
+ /// +#pragma warning disable CA1056 + public string BaseUriString { get; set; } +#pragma warning restore CA1056 + + /// + /// FHIR resource type + /// + public string ResourceType { get; set; } + + /// + /// Data processing task id + /// + public string TaskId { get; set; } + + /// + /// Begin sequence id + /// + public long BeginSequenceId { get; set; } + + /// + /// End sequence id + /// + public long EndSequenceId { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskResult.cs new file mode 100644 index 0000000000..57532a674e --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingTaskResult.cs @@ -0,0 +1,35 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportProcessingTaskResult + { + /// + /// FHIR resource type + /// + public string ResourceType { get; set; } + + /// + /// Succeed imported resource count + /// + public long SucceedCount { get; set; } + + /// + /// Failed processing resource count + /// + public long FailedCount { get; set; } + + /// + /// If any failure processing resource, error log would be uploaded. + /// + public string ErrorLogLocation { get; set; } + + /// + /// Critical error during data processing. 
+ /// + public string ImportError { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResource.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResource.cs new file mode 100644 index 0000000000..15d3ca74cc --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResource.cs @@ -0,0 +1,57 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.IO; +using Microsoft.Health.Fhir.Core.Features.Persistence; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportResource + { + public ImportResource(long id, long index, ResourceWrapper resource) + { + Id = id; + Index = index; + Resource = resource; + } + + public ImportResource(ResourceWrapper resource) + : this(0, 0, resource) + { + } + + public ImportResource(long id, long index, string importError) + { + Id = id; + Index = index; + ImportError = importError; + } + + /// + /// Resource index in the resource file + /// + public long Index { get; set; } + + /// + /// Resource sequence id + /// + public long Id { get; set; } + + /// + /// Resource wrapper from raw content + /// + public ResourceWrapper Resource { get; set; } + + /// + /// Processing error + /// + public string ImportError { get; set; } + + /// + /// Compressed raw resource stream + /// + public Stream CompressedStream { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResourceLoader.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResourceLoader.cs new file mode 100644 index 0000000000..5e62d1893b --- /dev/null +++ 
b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportResourceLoader.cs @@ -0,0 +1,180 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Extensions.Logging; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportResourceLoader : IImportResourceLoader + { + private const int DefaultChannelMaxCapacity = 500; + private const int DefaultMaxBatchSize = 100; + private static readonly int MaxConcurrentCount = Environment.ProcessorCount * 2; + + private IIntegrationDataStoreClient _integrationDataStoreClient; + private IImportResourceParser _importResourceParser; + private IImportErrorSerializer _importErrorSerializer; + private ILogger _logger; + + public ImportResourceLoader( + IIntegrationDataStoreClient integrationDataStoreClient, + IImportResourceParser importResourceParser, + IImportErrorSerializer importErrorSerializer, + ILogger logger) + { + EnsureArg.IsNotNull(integrationDataStoreClient, nameof(integrationDataStoreClient)); + EnsureArg.IsNotNull(importResourceParser, nameof(importResourceParser)); + EnsureArg.IsNotNull(importErrorSerializer, nameof(importErrorSerializer)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _integrationDataStoreClient = integrationDataStoreClient; + _importResourceParser = importResourceParser; + _importErrorSerializer = importErrorSerializer; + _logger = logger; + } + + public int MaxBatchSize { get; set; } = DefaultMaxBatchSize; + + public int ChannelMaxCapacity { get; set; } = 
DefaultChannelMaxCapacity; + + public (Channel resourceChannel, Task loadTask) LoadResources(string resourceLocation, long startIndex, string resourceType, Func sequenceIdGenerator, CancellationToken cancellationToken) + { + EnsureArg.IsNotEmptyOrWhiteSpace(resourceLocation, nameof(resourceLocation)); + + Channel outputChannel = Channel.CreateBounded(ChannelMaxCapacity); + + Task loadTask = Task.Run(async () => await LoadResourcesInternalAsync(outputChannel, resourceLocation, startIndex, resourceType, sequenceIdGenerator, cancellationToken), cancellationToken); + + return (outputChannel, loadTask); + } + + private async Task LoadResourcesInternalAsync(Channel outputChannel, string resourceLocation, long startIndex, string resourceType, Func sequenceIdGenerator, CancellationToken cancellationToken) + { + string leaseId = null; + try + { + _logger.LogInformation("Start to load resource from store."); + + // Try to acquire lease to block change on the blob. + leaseId = await _integrationDataStoreClient.TryAcquireLeaseAsync(new Uri(resourceLocation), Guid.NewGuid().ToString("N"), cancellationToken); + + using Stream inputDataStream = _integrationDataStoreClient.DownloadResource(new Uri(resourceLocation), 0, cancellationToken); + using StreamReader inputDataReader = new StreamReader(inputDataStream); + + string content = null; + long currentIndex = 0; + List<(string content, long index)> buffer = new List<(string content, long index)>(); + Queue>> processingTasks = new Queue>>(); + + while (!string.IsNullOrEmpty(content = await inputDataReader.ReadLineAsync())) + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + // TODO: improve to load from offset in file + if (currentIndex < startIndex) + { + currentIndex++; + continue; + } + + buffer.Add((content, currentIndex)); + currentIndex++; + + if (buffer.Count < MaxBatchSize) + { + continue; + } + + while (processingTasks.Count >= MaxConcurrentCount) + { + if 
(cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + IEnumerable importResources = await processingTasks.Dequeue(); + foreach (ImportResource importResource in importResources) + { + await outputChannel.Writer.WriteAsync(importResource, cancellationToken); + } + } + + processingTasks.Enqueue(ParseImportRawContentAsync(resourceType, buffer.ToArray(), sequenceIdGenerator)); + buffer.Clear(); + } + + processingTasks.Enqueue(ParseImportRawContentAsync(resourceType, buffer.ToArray(), sequenceIdGenerator)); + while (processingTasks.Count > 0) + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + IEnumerable importResources = await processingTasks.Dequeue(); + foreach (ImportResource importResource in importResources) + { + await outputChannel.Writer.WriteAsync(importResource, cancellationToken); + } + } + + _logger.LogInformation($"{currentIndex} lines loaded."); + } + finally + { + outputChannel.Writer.Complete(); + + if (!string.IsNullOrEmpty(leaseId)) + { + await _integrationDataStoreClient.TryReleaseLeaseAsync(new Uri(resourceLocation), leaseId, cancellationToken); + } + + _logger.LogInformation("Load resource from store complete."); + } + } + + private async Task> ParseImportRawContentAsync(string resourceType, (string content, long index)[] rawContents, Func idGenerator) + { + return await Task.Run(() => + { + List result = new List(); + + foreach ((string content, long index) in rawContents) + { + long id = idGenerator(index); + + try + { + ImportResource importResource = _importResourceParser.Parse(id, index, content); + + if (!string.IsNullOrEmpty(resourceType) && !resourceType.Equals(importResource.Resource?.ResourceTypeName, StringComparison.Ordinal)) + { + throw new FormatException("Resource type not match."); + } + + result.Add(importResource); + } + catch (Exception ex) + { + // May contains customer's data, no error logs here. 
+ result.Add(new ImportResource(id, index, _importErrorSerializer.Serialize(index, ex))); + } + } + + return result; + }); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskErrorResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskErrorResult.cs new file mode 100644 index 0000000000..a55891640d --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskErrorResult.cs @@ -0,0 +1,27 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Net; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportTaskErrorResult + { + /// + /// Err http status code + /// + public HttpStatusCode HttpStatusCode { get; set; } + + /// + /// Details error message + /// + public string ErrorMessage { get; set; } + + /// + /// Inner error if there're multiple errors + /// + public ImportTaskErrorResult InnerError { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskResult.cs new file mode 100644 index 0000000000..e9e24425d0 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportTaskResult.cs @@ -0,0 +1,38 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Newtonsoft.Json; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportTaskResult + { + /// + /// Transaction time for import task created + /// + [JsonProperty("transactionTime")] + public DateTimeOffset TransactionTime { get; set; } + + /// + /// Request Uri for the import opearion + /// + [JsonProperty("request")] + public string Request { get; set; } + + /// + /// Operation output for the success imported resources + /// + [JsonProperty("output")] + public IReadOnlyCollection Output { get; set; } + + /// + /// Operation output for the failed imported resources + /// + [JsonProperty("error")] + public IReadOnlyCollection Error { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequest.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequest.cs new file mode 100644 index 0000000000..1f160c241e --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequest.cs @@ -0,0 +1,44 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import.Models +{ + public class ImportRequest + { + /// + /// Determines the format of the the input data. + /// + public string InputFormat { get; set; } + + /// + /// Determines the location of the source. + /// Should be a uri pointing to the source. 
+ /// + public Uri InputSource { get; set; } + + /// + /// Determines the details of the input file that should be imported containing in the input source. + /// + public IReadOnlyList Input { get; set; } + + /// + /// Determines the details of the storage. + /// + public ImportRequestStorageDetail StorageDetail { get; set; } + + /// + /// Import operation mode + /// + public string Mode { get; set; } + + /// + /// Force import, ignore server status and import mode check + /// + public bool Force { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequestStorageDetail.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequestStorageDetail.cs new file mode 100644 index 0000000000..41a4760f14 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/ImportRequestStorageDetail.cs @@ -0,0 +1,20 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import.Models +{ + public class ImportRequestStorageDetail + { + /// + /// Determines the types of the storage + /// + public string Type { get; set; } = "azure-blob"; + + /// + /// Determines the parameters of the storage depending on type + /// + public object Parameters { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/InputResource.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/InputResource.cs new file mode 100644 index 0000000000..7d9ce7b618 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/Models/InputResource.cs @@ -0,0 +1,28 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import.Models +{ + public class InputResource + { + /// + /// Determines the resource type of the input + /// + public string Type { get; set; } + + /// + /// Determines the location of the input data. + /// Should be a uri pointing to the input data. + /// + public Uri Url { get; set; } + + /// + /// Determines the etag of resource file. 
+ /// + public string Etag { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ProgressRecord.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ProgressRecord.cs new file mode 100644 index 0000000000..865216c8a0 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ProgressRecord.cs @@ -0,0 +1,32 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ProgressRecord + { + public ProgressRecord() + { + } + + public ProgressRecord(long lastSurrogatedId) + { + LastSurrogatedId = lastSurrogatedId; + } + + public ProgressRecord(long lastSurrogatedId, long successResourceCount, long failResourceCount) + { + LastSurrogatedId = lastSurrogatedId; + SuccessResourceCount = successResourceCount; + FailResourceCount = failResourceCount; + } + + public long LastSurrogatedId { get; set; } + + public long SuccessResourceCount { get; set; } + + public long FailResourceCount { get; set; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientConstants.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientConstants.cs new file mode 100644 index 0000000000..ca46c8322c --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientConstants.cs @@ -0,0 +1,13 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.Fhir.Core.Features.Operations +{ + public static class IntegrationDataStoreClientConstants + { + public const string BlobPropertyETag = "ETag"; + public const string BlobPropertyLength = "Length"; + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientInitializerException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientInitializerException.cs new file mode 100644 index 0000000000..e388032764 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreClientInitializerException.cs @@ -0,0 +1,24 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Diagnostics; +using System.Net; + +namespace Microsoft.Health.Fhir.Core.Features.Operations +{ + public class IntegrationDataStoreClientInitializerException : Exception + { + public IntegrationDataStoreClientInitializerException(string message, HttpStatusCode statusCode) + : base(message) + { + Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty."); + + StatusCode = statusCode; + } + + public HttpStatusCode StatusCode { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreException.cs new file mode 100644 index 0000000000..f21892cbd7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/IntegrationDataStoreException.cs @@ -0,0 +1,24 @@ +// 
------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Diagnostics; +using System.Net; + +namespace Microsoft.Health.Fhir.Core.Features.Operations +{ + public class IntegrationDataStoreException : Exception + { + public IntegrationDataStoreException(string message, HttpStatusCode statusCode) + : base(message) + { + Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty."); + + StatusCode = statusCode; + } + + public HttpStatusCode StatusCode { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/OperationsConstants.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/OperationsConstants.cs index a7c79523d9..007ed932ad 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/OperationsConstants.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/OperationsConstants.cs @@ -34,5 +34,9 @@ public static class OperationsConstants public const string PatientEverythingUri = "https://www.hl7.org/fhir/patient-operation-everything.html"; public const string PurgeHistory = "purge-history"; + + public const string Import = "import"; + + public const string BulkImportContentTypeHeaderValue = "application/json"; } } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Security/DataActions.cs b/src/Microsoft.Health.Fhir.Core/Features/Security/DataActions.cs index 2b04b7c124..aa62ca527d 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Security/DataActions.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Security/DataActions.cs @@ -24,8 +24,9 @@ public enum DataActions : ulong Reindex = 1 << 6, ConvertData = 1 << 7, EditProfileDefinitions = 1 << 8, // Allows to 
Create/Update/Delete resources related to profile's resources. + Import = 1 << 9, [EnumMember(Value = "*")] - All = (EditProfileDefinitions << 1) - 1, + All = (Import << 1) - 1, } } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Security/roles.schema.json b/src/Microsoft.Health.Fhir.Core/Features/Security/roles.schema.json index e48b4be598..1e296cd0cb 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Security/roles.schema.json +++ b/src/Microsoft.Health.Fhir.Core/Features/Security/roles.schema.json @@ -13,6 +13,7 @@ "resourceValidate", "reindex", "convertData", + "import", "editProfileDefinitions" ] } diff --git a/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportRequest.cs new file mode 100644 index 0000000000..def0deaa36 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportRequest.cs @@ -0,0 +1,25 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using EnsureThat; +using MediatR; + +namespace Microsoft.Health.Fhir.Core.Messages.Import +{ + public class CancelImportRequest : IRequest + { + public CancelImportRequest(string taskId) + { + EnsureArg.IsNotNullOrWhiteSpace(taskId, nameof(taskId)); + + TaskId = taskId; + } + + /// + /// Import orchestrator task id + /// + public string TaskId { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportResponse.cs b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportResponse.cs new file mode 100644 index 0000000000..fe84574dcc --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CancelImportResponse.cs @@ -0,0 +1,22 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Net; + +namespace Microsoft.Health.Fhir.Core.Messages.Import +{ + public class CancelImportResponse + { + public CancelImportResponse(HttpStatusCode statusCode) + { + StatusCode = statusCode; + } + + /// + /// Response status code + /// + public HttpStatusCode StatusCode { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportRequest.cs new file mode 100644 index 0000000000..9bfa47d0dc --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportRequest.cs @@ -0,0 +1,57 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using EnsureThat; +using MediatR; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; + +namespace Microsoft.Health.Fhir.Core.Messages.Import +{ + public class CreateImportRequest : IRequest + { + public CreateImportRequest( + Uri requestUri, + string inputFormat, + Uri inputSource, + IReadOnlyList input, + ImportRequestStorageDetail storageDetail) + { + EnsureArg.IsNotNull(requestUri, nameof(requestUri)); + + RequestUri = requestUri; + InputFormat = inputFormat; + InputSource = inputSource; + Input = input; + StorageDetail = storageDetail; + } + + /// + /// Import request uri + /// + public Uri RequestUri { get; } + + /// + /// Input resource file format. + /// + public string InputFormat { get; } + + /// + /// Input resource + /// + public Uri InputSource { get; } + + /// + /// Input resource list + /// + public IReadOnlyList Input { get; } + + /// + /// Storage details + /// + public ImportRequestStorageDetail StorageDetail { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportResponse.cs b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportResponse.cs new file mode 100644 index 0000000000..f428046f2d --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/CreateImportResponse.cs @@ -0,0 +1,24 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using EnsureThat; + +namespace Microsoft.Health.Fhir.Core.Messages.Import +{ + public class CreateImportResponse + { + public CreateImportResponse(string taskId) + { + EnsureArg.IsNotNullOrWhiteSpace(taskId, nameof(taskId)); + + TaskId = taskId; + } + + /// + /// Created task id + /// + public string TaskId { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportRequest.cs new file mode 100644 index 0000000000..9fb6986cfb --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportRequest.cs @@ -0,0 +1,25 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using EnsureThat; +using MediatR; + +namespace Microsoft.Health.Fhir.Core.Messages.Import +{ + public class GetImportRequest : IRequest + { + public GetImportRequest(string taskId) + { + EnsureArg.IsNotNullOrWhiteSpace(taskId, nameof(taskId)); + + TaskId = taskId; + } + + /// + /// Import task id + /// + public string TaskId { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportResponse.cs b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportResponse.cs new file mode 100644 index 0000000000..db466d0665 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Messages/BulkImport/GetImportResponse.cs @@ -0,0 +1,34 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Net; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; + +namespace Microsoft.Health.Fhir.Core.Messages.Import +{ + public class GetImportResponse + { + public GetImportResponse(HttpStatusCode statusCode) + : this(statusCode, taskResult: null) + { + } + + public GetImportResponse(HttpStatusCode statusCode, ImportTaskResult taskResult) + { + StatusCode = statusCode; + TaskResult = taskResult; + } + + /// + /// Response http status + /// + public HttpStatusCode StatusCode { get; } + + /// + /// Response result + /// + public ImportTaskResult TaskResult { get; } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs index 2eb0a0b525..f8a169ce93 100644 --- a/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs +++ b/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs @@ -493,6 +493,33 @@ internal static string IllegalHtmlParsingError { } } + /// + /// Looks up a localized string similar to Import operation has already completed. + /// + internal static string ImportOperationCompleted { + get { + return ResourceManager.GetString("ImportOperationCompleted", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to There is already an import task running.. + /// + internal static string ImportTaskIsRunning { + get { + return ResourceManager.GetString("ImportTaskIsRunning", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to The requested import task "{0}" was not found.. + /// + internal static string ImportTaskNotFound { + get { + return ResourceManager.GetString("ImportTaskNotFound", resourceCulture); + } + } + /// /// Looks up a localized string similar to The _include search cannot be used against the base route.. 
/// diff --git a/src/Microsoft.Health.Fhir.Core/Resources.resx b/src/Microsoft.Health.Fhir.Core/Resources.resx index 95487f96c8..079f4e81a4 100644 --- a/src/Microsoft.Health.Fhir.Core/Resources.resx +++ b/src/Microsoft.Health.Fhir.Core/Resources.resx @@ -578,6 +578,16 @@ The phase '{0}' in $everything operation is invalid. + + There is already an import task running. + + + The requested import task "{0}" was not found. + {0} is the import task id. + + + Import operation has already completed + The operation was stopped due to the underlying request being too resource intensive. If possible, try narrowing or changing the criteria. diff --git a/src/Microsoft.Health.Fhir.R4.Client/Microsoft.Health.Fhir.R4.Client.csproj b/src/Microsoft.Health.Fhir.R4.Client/Microsoft.Health.Fhir.R4.Client.csproj index 9377e19cfc..60422c26ee 100644 --- a/src/Microsoft.Health.Fhir.R4.Client/Microsoft.Health.Fhir.R4.Client.csproj +++ b/src/Microsoft.Health.Fhir.R4.Client/Microsoft.Health.Fhir.R4.Client.csproj @@ -12,5 +12,8 @@ + + + diff --git a/src/Microsoft.Health.Fhir.R4.Web/Properties/launchSettings.json b/src/Microsoft.Health.Fhir.R4.Web/Properties/launchSettings.json index 0771fd97d9..8e9553f230 100644 --- a/src/Microsoft.Health.Fhir.R4.Web/Properties/launchSettings.json +++ b/src/Microsoft.Health.Fhir.R4.Web/Properties/launchSettings.json @@ -27,6 +27,10 @@ "TestAuthEnvironment:FilePath": "..//..//testauthenvironment.json", "SqlServer:ConnectionString": "server=(local);Initial Catalog=FHIR_R4;Integrated Security=true", "DataStore": "SqlServer", + "TaskHosting:Enabled": "true", + "TaskHosting:MaxRunningTaskCount": "2", + "FhirServer:Operations:IntegrationDataStore:StorageAccountConnection": "UseDevelopmentStorage=true", + "FhirServer:Operations:Import:Enabled": "true", "ASPNETCORE_ENVIRONMENT": "development" }, "applicationUrl": "https://localhost:44348/" diff --git a/src/Microsoft.Health.Fhir.R5.Client/Microsoft.Health.Fhir.R5.Client.csproj 
b/src/Microsoft.Health.Fhir.R5.Client/Microsoft.Health.Fhir.R5.Client.csproj index 764d7e97e3..196a83d224 100644 --- a/src/Microsoft.Health.Fhir.R5.Client/Microsoft.Health.Fhir.R5.Client.csproj +++ b/src/Microsoft.Health.Fhir.R5.Client/Microsoft.Health.Fhir.R5.Client.csproj @@ -12,5 +12,8 @@ + + + diff --git a/src/Microsoft.Health.Fhir.R5.Web/Microsoft.Health.Fhir.R5.Web.csproj b/src/Microsoft.Health.Fhir.R5.Web/Microsoft.Health.Fhir.R5.Web.csproj index 46fab4cb67..1a81918da7 100644 --- a/src/Microsoft.Health.Fhir.R5.Web/Microsoft.Health.Fhir.R5.Web.csproj +++ b/src/Microsoft.Health.Fhir.R5.Web/Microsoft.Health.Fhir.R5.Web.csproj @@ -24,6 +24,7 @@ + diff --git a/src/Microsoft.Health.Fhir.R5.Web/Properties/launchSettings.json b/src/Microsoft.Health.Fhir.R5.Web/Properties/launchSettings.json index c643629af4..37b6318976 100644 --- a/src/Microsoft.Health.Fhir.R5.Web/Properties/launchSettings.json +++ b/src/Microsoft.Health.Fhir.R5.Web/Properties/launchSettings.json @@ -27,6 +27,10 @@ "TestAuthEnvironment:FilePath": "..//..//testauthenvironment.json", "SqlServer:ConnectionString": "server=(local);Initial Catalog=FHIR_R5;Integrated Security=true", "DataStore": "SqlServer", + "TaskHosting:Enabled": "true", + "TaskHosting:MaxRunningTaskCount": "2", + "FhirServer:Operations:IntegrationDataStore:StorageAccountConnection": "UseDevelopmentStorage=true", + "FhirServer:Operations:Import:Enabled": "true", "ASPNETCORE_ENVIRONMENT": "development" }, "applicationUrl": "https://localhost:44348/" diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs new file mode 100644 index 0000000000..ae69cfd1e2 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs @@ -0,0 +1,204 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft 
Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using MediatR; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Api.Configs; +using Microsoft.Health.Fhir.Api.Controllers; +using Microsoft.Health.Fhir.Api.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; +using Microsoft.Health.Fhir.Core.Features.Routing; +using Microsoft.Health.Fhir.Core.Messages.Import; +using NSubstitute; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Api.UnitTests.Controllers +{ + public class ImportControllerTests + { + private IMediator _mediator = Substitute.For(); + private RequestContextAccessor _fhirRequestContextAccessor = Substitute.For>(); + private IUrlResolver _urlResolver = Substitute.For(); + + public static TheoryData ValidBody => + new TheoryData + { + GetValidBulkImportRequestConfiguration(), + }; + + public static TheoryData InValidBody => + new TheoryData + { + GetBulkImportRequestConfigurationWithUnsupportedInputFormat(), + GetBulkImportRequestConfigurationWithUnsupportedStorageType(), + GetBulkImportRequestConfigurationWithUnsupportedResourceType(), + GetBulkImportRequestConfigurationWithNoInputFile(), + }; + + [Theory] + [MemberData(nameof(ValidBody), MemberType = typeof(ImportControllerTests))] + public async Task GivenAnBulkImportRequest_WhenDisabled_ThenRequestNotValidExceptionShouldBeThrown(ImportRequest body) + { + var 
bulkImportController = GetController(new ImportTaskConfiguration() { Enabled = false }); + + body.Mode = ImportConstants.InitialLoadMode; + await Assert.ThrowsAsync(() => bulkImportController.Import(body.ToParameters())); + } + + [Theory] + [MemberData(nameof(InValidBody), MemberType = typeof(ImportControllerTests))] + public async Task GivenAnBulkImportRequest_WhenRequestConfigurationNotValid_ThenRequestNotValidExceptionShouldBeThrown(ImportRequest body) + { + var bulkImportController = GetController(new ImportTaskConfiguration() { Enabled = true }); + + body.Mode = ImportConstants.InitialLoadMode; + await Assert.ThrowsAsync(() => bulkImportController.Import(body.ToParameters())); + } + + [Theory] + [MemberData(nameof(ValidBody), MemberType = typeof(ImportControllerTests))] + public async Task GivenAnBulkImportRequest_WhenRequestWithoutMode_ThenRequestNotValidExceptionShouldBeThrown(ImportRequest body) + { + var bulkImportController = GetController(new ImportTaskConfiguration() { Enabled = true }); + + await Assert.ThrowsAsync(() => bulkImportController.Import(body.ToParameters())); + } + + private static CreateImportResponse CreateBulkImportResponse() + { + return new CreateImportResponse("123"); + } + + private ImportController GetController(ImportTaskConfiguration bulkImportConfig) + { + var operationConfig = new OperationsConfiguration() + { + Import = bulkImportConfig, + }; + + IOptions optionsOperationConfiguration = Substitute.For>(); + optionsOperationConfiguration.Value.Returns(operationConfig); + + var features = new FeatureConfiguration(); + IOptions optionsFeatures = Substitute.For>(); + optionsFeatures.Value.Returns(features); + + return new ImportController( + _mediator, + _fhirRequestContextAccessor, + _urlResolver, + optionsOperationConfiguration, + optionsFeatures, + NullLogger.Instance); + } + + private static ImportRequest GetValidBulkImportRequestConfiguration() + { + var input = new List + { + new InputResource + { + Type = "Patient", + Url = 
new Uri("https://client.example.org/patient_file_2.ndjson?sig=RHIX5Xcg0Mq2rqI3OlWT"), + }, + new InputResource + { + Type = "Observation", + Url = new Uri("https://client.example.org/obseration_file_19.ndjson?sig=RHIX5Xcg0Mq2rqI3OlWT"), + }, + }; + + var importRequest = new ImportRequest(); + importRequest.InputFormat = "application/fhir+ndjson"; + importRequest.InputSource = new Uri("https://other-server.example.org"); + importRequest.Input = input; + importRequest.StorageDetail = new ImportRequestStorageDetail(); + + return importRequest; + } + + private static ImportRequest GetBulkImportRequestConfigurationWithUnsupportedInputFormat() + { + var input = new List + { + new InputResource + { + Type = "Patient", + Url = new Uri("https://client.example.org/patient_file_2.ndjson?sig=RHIX5Xcg0Mq2rqI3OlWT"), + }, + }; + + var bulkImportRequestConfiguration = new ImportRequest(); + bulkImportRequestConfiguration.InputFormat = "application/json"; + bulkImportRequestConfiguration.InputSource = new Uri("https://other-server.example.org"); + bulkImportRequestConfiguration.Input = input; + + return bulkImportRequestConfiguration; + } + + private static ImportRequest GetBulkImportRequestConfigurationWithUnsupportedStorageType() + { + var input = new List + { + new InputResource + { + Type = "Patient", + Url = new Uri("https://client.example.org/patient_file_2.ndjson?sig=RHIX5Xcg0Mq2rqI3OlWT"), + }, + }; + + var bulkImportRequestConfiguration = new ImportRequest(); + bulkImportRequestConfiguration.InputFormat = "application/fhir+ndjson"; + bulkImportRequestConfiguration.InputSource = new Uri("https://other-server.example.org"); + bulkImportRequestConfiguration.Input = input; + bulkImportRequestConfiguration.StorageDetail = new ImportRequestStorageDetail + { + Type = "Fake", + }; + + return bulkImportRequestConfiguration; + } + + private static ImportRequest GetBulkImportRequestConfigurationWithUnsupportedResourceType() + { + var input = new List + { + new InputResource + { + 
Type = "Fake", + Url = new Uri("https://client.example.org/patient_file_2.ndjson?sig=RHIX5Xcg0Mq2rqI3OlWT"), + }, + }; + + var bulkImportRequestConfiguration = new ImportRequest(); + bulkImportRequestConfiguration.InputFormat = "application/fhir+ndjson"; + bulkImportRequestConfiguration.InputSource = new Uri("https://other-server.example.org"); + bulkImportRequestConfiguration.Input = input; + + return bulkImportRequestConfiguration; + } + + private static ImportRequest GetBulkImportRequestConfigurationWithNoInputFile() + { + var input = new List(); + + var bulkImportRequestConfiguration = new ImportRequest(); + bulkImportRequestConfiguration.InputFormat = "application/fhir+ndjson"; + bulkImportRequestConfiguration.InputSource = new Uri("https://other-server.example.org"); + bulkImportRequestConfiguration.Input = input; + + return bulkImportRequestConfiguration; + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/FilterTestsHelper.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/FilterTestsHelper.cs index 8204791936..b5036a4b32 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/FilterTestsHelper.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/FilterTestsHelper.cs @@ -28,5 +28,13 @@ public static ExportController CreateMockExportController() Options.Create(new OperationsConfiguration()), Options.Create(new FeatureConfiguration())); } + + public static ImportController CreateMockBulkImportController() + { + return Mock.TypeWithArguments( + new FhirRequestContextAccessor(), + Options.Create(new OperationsConfiguration()), + Options.Create(new FeatureConfiguration())); + } } } diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/ValidateBulkImportRequestFilterAttributeTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/ValidateBulkImportRequestFilterAttributeTests.cs new file mode 100644 index 0000000000..d49be1fedc 
--- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Filters/ValidateBulkImportRequestFilterAttributeTests.cs @@ -0,0 +1,174 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.Abstractions; +using Microsoft.AspNetCore.Mvc.Filters; +using Microsoft.AspNetCore.Routing; +using Microsoft.Health.Fhir.Api.Features.Filters; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Net.Http.Headers; +using Xunit; + +namespace Microsoft.Health.Fhir.Api.UnitTests.Features.Filters +{ + public class ValidateBulkImportRequestFilterAttributeTests + { + private const string CorrectPreferHeaderValue = "respond-async"; + private const string CorrectContentTypeHeaderValue = "application/fhir+json"; + private const string PreferHeaderName = "Prefer"; + + private readonly ValidateImportRequestFilterAttribute _filter; + + public ValidateBulkImportRequestFilterAttributeTests() + { + _filter = new ValidateImportRequestFilterAttribute(); + } + + [Theory] + [InlineData("respond-async, wait = 10")] + [InlineData("return-content")] + [InlineData("*")] + public void GiveARequestWithInvalidPreferHeader_WhenGettingABulkImportOperationRequest_ThenARequestNotValidExceptionShouldBeThrown(string preferHeader) + { + var context = CreateContext(); + context.HttpContext.Request.Method = "GET"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, preferHeader); + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + 
[Theory] + [InlineData("respond-async, wait = 10")] + [InlineData("return-content")] + [InlineData("*")] + public void GiveARequestWithInvalidPreferHeader_WhenCreatingABulkImportRequest_ThenARequestNotValidExceptionShouldBeThrown(string preferHeader) + { + var context = CreateContext(); + context.HttpContext.Request.Method = "POST"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, preferHeader); + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Theory] + [InlineData("respond-async, wait = 10")] + [InlineData("return-content")] + [InlineData("*")] + public void GiveARequestWithInvalidPreferHeader_WhenCancelABulkImportRequest_ThenARequestNotValidExceptionShouldBeThrown(string preferHeader) + { + var context = CreateContext(); + context.HttpContext.Request.Method = "DELETE"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, preferHeader); + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Fact] + public void GivenARequestWithNoPreferHeader_WhenGettingABulkImportOperationRequest_ThenARequestNotValidExceptionShouldBeThrown() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "GET"; + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Fact] + public void GivenARequestWithNoPreferHeader_WhenCreatingABulkImportRequest_ThenARequestNotValidExceptionShouldBeThrown() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "POST"; + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Fact] + public void 
GivenARequestWithNoPreferHeader_WhenCancelABulkImportRequest_ThenARequestNotValidExceptionShouldBeThrown() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "DELETE"; + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Theory] + [InlineData("multipart/form-data")] + [InlineData("text/plain")] + [InlineData("text/html")] + [InlineData("text/xml")] + [InlineData("application/xhtml+xml")] + [InlineData("application/xml")] + [InlineData("*")] + public void GiveARequestWithInvalidContentTypeHeader_WhenCreatingABulkImportRequest_ThenARequestNotValidExceptionShouldBeThrown(string contentTypeHeader) + { + var context = CreateContext(); + context.HttpContext.Request.Method = "POST"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, CorrectPreferHeaderValue); + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, contentTypeHeader); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Fact] + public void GivenARequestWithNoContentTypeHeader_WhenCreatingABulkImportRequest_ThenARequestNotValidExceptionShouldBeThrown() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "POST"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, CorrectPreferHeaderValue); + + Assert.Throws(() => _filter.OnActionExecuting(context)); + } + + [Fact] + public void GivenARequestWithNoContentTypeHeader_WhenGetABulkImportRequest_ThenTheResultIsSuccessful() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "GET"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, CorrectPreferHeaderValue); + + _filter.OnActionExecuting(context); + } + + [Fact] + public void GivenARequestWithNoContentTypeHeader_WhenCancelABulkImportRequest_ThenTheResultIsSuccessful() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "DELETE"; + 
context.HttpContext.Request.Headers.Add(PreferHeaderName, CorrectPreferHeaderValue); + + _filter.OnActionExecuting(context); + } + + [Fact] + public void GivenARequestWithCorrectHeader_WhenCreatingABulkImportRequest_ThenTheResultIsSuccessful() + { + var context = CreateContext(); + context.HttpContext.Request.Method = "POST"; + context.HttpContext.Request.Headers.Add(PreferHeaderName, CorrectPreferHeaderValue); + context.HttpContext.Request.Headers.Add(HeaderNames.ContentType, CorrectContentTypeHeaderValue); + + _filter.OnActionExecuting(context); + } + + private static ActionExecutingContext CreateContext() + { + var context = new ActionExecutingContext( + new ActionContext(new DefaultHttpContext(), new RouteData(), new ActionDescriptor()), + new List(), + new Dictionary(), + FilterTestsHelper.CreateMockBulkImportController()); + + return context; + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Headers/ImportResultExtensionsTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Headers/ImportResultExtensionsTests.cs new file mode 100644 index 0000000000..31ee7f3160 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Headers/ImportResultExtensionsTests.cs @@ -0,0 +1,43 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using Microsoft.Health.Fhir.Api.Features.ActionResults; +using Microsoft.Health.Fhir.Api.Features.Headers; +using Microsoft.Health.Fhir.Core.Features.Operations; +using Microsoft.Health.Fhir.Core.Features.Routing; +using Microsoft.Net.Http.Headers; +using NSubstitute; +using Xunit; + +namespace Microsoft.Health.Fhir.Api.UnitTests.Features.Headers +{ + public class ImportResultExtensionsTests + { + [Fact] + public void GivenAnImportResult_WhenSettingAContentLocationHeader_ThenImportResultHasAContentLocationHeader() + { + string opName = OperationsConstants.Import; + string id = Guid.NewGuid().ToString(); + var bulkImportOperationUrl = new Uri($"http://localhost/{OperationsConstants.Operations}/{opName}/{id}"); + + var urlResolver = Substitute.For(); + urlResolver.ResolveOperationResultUrl(Arg.Any(), Arg.Any()).Returns(bulkImportOperationUrl); + + var bulkImportResult = ImportResult.Accepted().SetContentLocationHeader(urlResolver, opName, id); + + Assert.Equal(bulkImportOperationUrl.AbsoluteUri, bulkImportResult.Headers[HeaderNames.ContentLocation]); + } + + [Fact] + public void GivenAnImportResult_WhenSettingAContentTypeHeader_ThenImportResultHasAContentTypeHeader() + { + string contentTypeValue = "application/json"; + var bulkImportResult = ImportResult.Accepted().SetContentTypeHeader(contentTypeValue); + + Assert.Equal(contentTypeValue, bulkImportResult.Headers[HeaderNames.ContentType]); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/ImportRequestExtensionsTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/ImportRequestExtensionsTests.cs new file mode 100644 index 0000000000..d914c6142c --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/ImportRequestExtensionsTests.cs @@ -0,0 +1,48 @@ +// 
------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Microsoft.Health.Fhir.Api.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; +using Xunit; + +namespace Microsoft.Health.Fhir.Shared.Api.UnitTests.Features.Operations.Import +{ + public class ImportRequestExtensionsTests + { + [Fact] + public void GivenImportRequestInParamtersFormat_WhenConvert_ThenImportRequestShouldBeReturned() + { + ImportRequest input = new ImportRequest(); + input.InputFormat = "test"; + input.Force = true; + input.Mode = "test"; + input.InputSource = new Uri("http://dummy"); + input.Input = new List() { new InputResource() { Etag = "etag", Type = "type", Url = new Uri("http://dummy/resource") } }; + input.StorageDetail = new ImportRequestStorageDetail() { Type = "blob" }; + + ImportRequest output = input.ToParameters().ExtractImportRequest(); + Assert.Equal(input.InputFormat, output.InputFormat); + Assert.Equal(input.InputSource, output.InputSource); + Assert.Equal(input.Force, output.Force); + Assert.Equal(input.Mode, output.Mode); + Assert.Equal(input.StorageDetail.Type, output.StorageDetail.Type); + Assert.Equal(input.Input[0].Type, output.Input[0].Type); + Assert.Equal(input.Input[0].Url, output.Input[0].Url); + Assert.Equal(input.Input[0].Etag, output.Input[0].Etag); + } + + [Fact] + public void GivenEmptyImportRequestInParamtersFormat_WhenConvert_ThenDefaultValueShouldBeFilled() + { + ImportRequest input = new ImportRequest(); + + ImportRequest output = input.ToParameters().ExtractImportRequest(); + Assert.Equal(output.InputFormat, ImportRequestExtensions.DefaultInputFormat); + 
Assert.Equal(output.StorageDetail.Type, ImportRequestExtensions.DefaultStorageDetailType); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/InitialImportLockMiddlewareTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/InitialImportLockMiddlewareTests.cs new file mode 100644 index 0000000000..262b6508ce --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Operations/Import/InitialImportLockMiddlewareTests.cs @@ -0,0 +1,100 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Api.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Configs; +using Xunit; + +namespace Microsoft.Health.Fhir.Api.UnitTests.Features.Operations.Import +{ + public class InitialImportLockMiddlewareTests + { + [Fact] + public async Task GivenPostResourceRequest_WhenInitialImportModeEnabled_Then423ShouldBeReturned() + { + InitialImportLockMiddleware middleware = CreateInitialImportLockMiddleware(new ImportTaskConfiguration() { Enabled = true, InitialImportMode = true }); + HttpContext httpContext = new DefaultHttpContext(); + httpContext.Request.Path = "/Patient"; + httpContext.Request.Method = HttpMethods.Post.ToString(); + await middleware.Invoke(httpContext); + + Assert.Equal(423, httpContext.Response.StatusCode); + } + + [Fact] + public async Task GivenGetResourceRequest_WhenInitialImportModeEnabled_Then200ShouldBeReturned() + { + InitialImportLockMiddleware middleware = CreateInitialImportLockMiddleware(new ImportTaskConfiguration() { 
Enabled = false, InitialImportMode = true }); + HttpContext httpContext = new DefaultHttpContext(); + httpContext.Request.Path = "/patient"; + httpContext.Request.Method = HttpMethods.Get.ToString(); + await middleware.Invoke(httpContext); + + Assert.Equal(200, httpContext.Response.StatusCode); + } + + [Fact] + public async Task GivenStartImportRequest_WhenInitialImportModeEnabled_Then200ShouldBeReturned() + { + InitialImportLockMiddleware middleware = CreateInitialImportLockMiddleware(new ImportTaskConfiguration() { Enabled = false, InitialImportMode = true }); + HttpContext httpContext = new DefaultHttpContext(); + httpContext.Request.Path = "/$import"; + httpContext.Request.Method = HttpMethods.Post.ToString(); + await middleware.Invoke(httpContext); + + Assert.Equal(200, httpContext.Response.StatusCode); + } + + [Fact] + public async Task GivenCancelImportRequest_WhenInitialImportModeEnabled_Then200ShouldBeReturned() + { + InitialImportLockMiddleware middleware = CreateInitialImportLockMiddleware(new ImportTaskConfiguration() { Enabled = false, InitialImportMode = true }); + HttpContext httpContext = new DefaultHttpContext(); + httpContext.Request.Path = "/_operations/import/abc"; + httpContext.Request.Method = HttpMethods.Delete.ToString(); + await middleware.Invoke(httpContext); + + Assert.Equal(200, httpContext.Response.StatusCode); + } + + [Fact] + public async Task GivenPostResourceRequest_WhenImportNotEnabled_Then200ShouldBeReturned() + { + InitialImportLockMiddleware middleware = CreateInitialImportLockMiddleware(new ImportTaskConfiguration() { Enabled = false, InitialImportMode = true }); + HttpContext httpContext = new DefaultHttpContext(); + httpContext.Request.Path = "/Patient"; + httpContext.Request.Method = HttpMethods.Post.ToString(); + await middleware.Invoke(httpContext); + + Assert.Equal(200, httpContext.Response.StatusCode); + } + + [Fact] + public async Task GivenPostResourceRequest_WhenInitialImportModeNotEnabled_Then200ShouldBeReturned() + 
{ + InitialImportLockMiddleware middleware = CreateInitialImportLockMiddleware(new ImportTaskConfiguration() { Enabled = true, InitialImportMode = false }); + HttpContext httpContext = new DefaultHttpContext(); + httpContext.Request.Path = "/Patient"; + httpContext.Request.Method = HttpMethods.Post.ToString(); + await middleware.Invoke(httpContext); + + Assert.Equal(200, httpContext.Response.StatusCode); + } + + private InitialImportLockMiddleware CreateInitialImportLockMiddleware(ImportTaskConfiguration importTaskConfiguration) + { + return new InitialImportLockMiddleware( + async x => + { + x.Response.StatusCode = 200; + await Task.CompletedTask; + }, + Options.Create(importTaskConfiguration)); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Routing/UrlResolverTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Routing/UrlResolverTests.cs index c40786efee..3a8565f5dd 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Routing/UrlResolverTests.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Features/Routing/UrlResolverTests.cs @@ -307,7 +307,7 @@ public void GivenAReindexOperation_WhenOperationResultUrlIsResolved_ThenCorrectU public void GivenAnUnknownOperation_WhenOperationResultUrlIsResolved_ThenOperationNotImplementedExceptionShouldBeThrown() { const string id = "12345"; - const string opName = "import"; + const string opName = "fakeOp"; Assert.Throws(() => _urlResolver.ResolveOperationResultUrl(opName, id)); } diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Microsoft.Health.Fhir.Shared.Api.UnitTests.projitems b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Microsoft.Health.Fhir.Shared.Api.UnitTests.projitems index 933fc5a919..93e063021b 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Microsoft.Health.Fhir.Shared.Api.UnitTests.projitems +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Microsoft.Health.Fhir.Shared.Api.UnitTests.projitems @@ -12,6 +12,7 @@ + @@ 
-28,6 +29,7 @@ + @@ -42,9 +44,12 @@ + + + diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs new file mode 100644 index 0000000000..dcf2ada9a9 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs @@ -0,0 +1,203 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Threading.Tasks; +using EnsureThat; +using Hl7.Fhir.Model; +using MediatR; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Api.Features.Audit; +using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Api.Configs; +using Microsoft.Health.Fhir.Api.Features.ActionResults; +using Microsoft.Health.Fhir.Api.Features.Filters; +using Microsoft.Health.Fhir.Api.Features.Headers; +using Microsoft.Health.Fhir.Api.Features.Operations.Import; +using Microsoft.Health.Fhir.Api.Features.Routing; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Exceptions; +using Microsoft.Health.Fhir.Core.Extensions; +using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Operations; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; +using Microsoft.Health.Fhir.Core.Features.Routing; +using Microsoft.Health.Fhir.Core.Messages.Import; +using Microsoft.Health.Fhir.ValueSets; + +namespace Microsoft.Health.Fhir.Api.Controllers +{ + 
[ServiceFilter(typeof(AuditLoggingFilterAttribute))] + [ServiceFilter(typeof(OperationOutcomeExceptionFilterAttribute))] + public class ImportController : Controller + { + /* + * We are currently hardcoding the routing attribute to be specific to BulkImport and + * get forwarded to this controller. As we add more operations we would like to resolve + * the routes in a more dynamic manner. One way would be to use a regex route constraint + * - eg: "{operation:regex(^\\$([[a-zA-Z]]+))}" - and use the appropriate operation handler. + * Another way would be to use the capability statement to dynamically determine what operations + * are supported. + * It would be easier to determine what pattern to follow once we have built support for a couple + * of operations. Then we can refactor this controller accordingly. + */ + + private readonly IReadOnlyList allowedImportFormat = new List { "application/fhir+ndjson" }; + private readonly IReadOnlyList allowedStorageType = new List { "azure-blob" }; + private readonly IMediator _mediator; + private readonly RequestContextAccessor _fhirRequestContextAccessor; + private readonly IUrlResolver _urlResolver; + private readonly FeatureConfiguration _features; + private readonly ILogger _logger; + private readonly ImportTaskConfiguration _importConfig; + + public ImportController( + IMediator mediator, + RequestContextAccessor fhirRequestContextAccessor, + IUrlResolver urlResolver, + IOptions operationsConfig, + IOptions features, + ILogger logger) + { + EnsureArg.IsNotNull(fhirRequestContextAccessor, nameof(fhirRequestContextAccessor)); + EnsureArg.IsNotNull(operationsConfig?.Value?.Import, nameof(operationsConfig)); + EnsureArg.IsNotNull(urlResolver, nameof(urlResolver)); + EnsureArg.IsNotNull(features?.Value, nameof(features)); + EnsureArg.IsNotNull(mediator, nameof(mediator)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _fhirRequestContextAccessor = fhirRequestContextAccessor; + _importConfig = 
operationsConfig.Value.Import; + _urlResolver = urlResolver; + _features = features.Value; + _mediator = mediator; + _logger = logger; + } + + [HttpPost] + [Route(KnownRoutes.Import)] + [ServiceFilter(typeof(ValidateImportRequestFilterAttribute))] + [AuditEventType(AuditEventSubType.Import)] + public async Task Import([FromBody] Parameters importTaskParameters) + { + CheckIfImportIsEnabled(); + + ImportRequest importRequest = importTaskParameters.ExtractImportRequest(); + ValidateImportRequestConfiguration(importRequest); + + if (!ImportConstants.InitialLoadMode.Equals(importRequest.Mode, StringComparison.Ordinal)) + { + throw new RequestNotValidException(Resources.OnlyInitialImportOperationSupported); + } + + if (!importRequest.Force && !_importConfig.InitialImportMode) + { + throw new RequestNotValidException(Resources.InitialImportModeNotEnabled); + } + + CreateImportResponse response = await _mediator.ImportAsync( + _fhirRequestContextAccessor.RequestContext.Uri, + importRequest.InputFormat, + importRequest.InputSource, + importRequest.Input, + importRequest.StorageDetail, + HttpContext.RequestAborted); + + var bulkImportResult = ImportResult.Accepted(); + bulkImportResult.SetContentLocationHeader(_urlResolver, OperationsConstants.Import, response.TaskId); + return bulkImportResult; + } + + [HttpDelete] + [Route(KnownRoutes.ImportJobLocation, Name = RouteNames.CancelImport)] + [AuditEventType(AuditEventSubType.Import)] + public async Task CancelImport(string idParameter) + { + CancelImportResponse response = await _mediator.CancelImportAsync(idParameter, HttpContext.RequestAborted); + + _logger.LogInformation($"CancelImport {response.StatusCode}"); + return new ImportResult(response.StatusCode); + } + + [HttpGet] + [Route(KnownRoutes.ImportJobLocation, Name = RouteNames.GetImportStatusById)] + [AuditEventType(AuditEventSubType.Import)] + public async Task GetImportStatusById(string idParameter) + { + var getBulkImportResult = await 
_mediator.GetImportStatusAsync( + idParameter, + HttpContext.RequestAborted); + + // If the job is complete, we need to return 200 along with the completed data to the client. + // Else we need to return 202 - Accepted. + ImportResult bulkImportActionResult; + if (getBulkImportResult.StatusCode == HttpStatusCode.OK) + { + bulkImportActionResult = ImportResult.Ok(getBulkImportResult.TaskResult); + bulkImportActionResult.SetContentTypeHeader(OperationsConstants.BulkImportContentTypeHeaderValue); + } + else + { + bulkImportActionResult = ImportResult.Accepted(); + } + + return bulkImportActionResult; + } + + private void CheckIfImportIsEnabled() + { + if (!_importConfig.Enabled) + { + throw new RequestNotValidException(string.Format(Resources.OperationNotEnabled, OperationsConstants.Import)); + } + } + + private void ValidateImportRequestConfiguration(ImportRequest importData) + { + if (importData == null) + { + _logger.LogInformation("Failed to deserialize import request body as import configuration."); + throw new RequestNotValidException(Resources.ImportRequestNotValid); + } + + var inputFormat = importData.InputFormat; + if (!allowedImportFormat.Any(s => s.Equals(inputFormat, StringComparison.OrdinalIgnoreCase))) + { + throw new RequestNotValidException(string.Format(Resources.ImportRequestValueNotValid, nameof(inputFormat))); + } + + var storageDetails = importData.StorageDetail; + if (storageDetails != null && !allowedStorageType.Any(s => s.Equals(storageDetails.Type, StringComparison.OrdinalIgnoreCase))) + { + throw new RequestNotValidException(string.Format(Resources.ImportRequestValueNotValid, nameof(storageDetails))); + } + + var input = importData.Input; + if (input == null || input.Count == 0) + { + throw new RequestNotValidException(string.Format(Resources.ImportRequestValueNotValid, nameof(input))); + } + + foreach (var item in input) + { + if (!Enum.IsDefined(typeof(ResourceType), item.Type)) + { + throw new 
RequestNotValidException(string.Format(Resources.UnsupportedResourceType, item.Type)); + } + + if (item.Url == null) + { + throw new RequestNotValidException(string.Format(Resources.ImportRequestValueNotValid, "input.url")); + } + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/Import/ImportRequestExtensions.cs b/src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/Import/ImportRequestExtensions.cs new file mode 100644 index 0000000000..2872058995 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/Import/ImportRequestExtensions.cs @@ -0,0 +1,160 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; +using static Hl7.Fhir.Model.Parameters; + +namespace Microsoft.Health.Fhir.Api.Features.Operations.Import +{ + public static class ImportRequestExtensions + { + public const string InputFormatParamterName = "inputFormat"; + public const string DefaultInputFormat = "application/fhir+ndjson"; + public const string InputSourceParamterName = "inputSource"; + public const string InputParamterName = "input"; + public const string TypeParamterName = "type"; + public const string UrlParamterName = "url"; + public const string EtagParamterName = "etag"; + public const string StorageDetailParamterName = "storageDetail"; + public const string ModeParamterName = "mode"; + public const string ForceParamterName = "force"; + public const string DefaultStorageDetailType = "azure-blob"; + + public static Parameters ToParameters(this ImportRequest 
importRequest) + { + Parameters paramters = new Parameters(); + + if (string.IsNullOrEmpty(importRequest.InputFormat)) + { + paramters.Add(InputFormatParamterName, new FhirString(DefaultInputFormat)); + } + else + { + paramters.Add(InputFormatParamterName, new FhirString(importRequest.InputFormat)); + } + + if (importRequest.InputSource != null) + { + paramters.Add(InputSourceParamterName, new FhirUri(importRequest.InputSource)); + } + + if (importRequest.Input != null) + { + foreach (InputResource importResource in importRequest.Input) + { + ParameterComponent inputResourceComponent = new ParameterComponent() { Name = InputParamterName }; + paramters.Parameter.Add(inputResourceComponent); + + if (!string.IsNullOrEmpty(importResource.Type)) + { + inputResourceComponent.Part.Add(new ParameterComponent() { Name = TypeParamterName, Value = new FhirString(importResource.Type) }); + } + + if (!string.IsNullOrEmpty(importResource.Etag)) + { + inputResourceComponent.Part.Add(new ParameterComponent() { Name = EtagParamterName, Value = new FhirString(importResource.Etag) }); + } + + if (importResource.Url != null) + { + inputResourceComponent.Part.Add(new ParameterComponent() { Name = UrlParamterName, Value = new FhirUri(importResource.Url) }); + } + } + } + + ParameterComponent storageDetailsParameterComponent = new ParameterComponent() { Name = StorageDetailParamterName }; + if (!string.IsNullOrWhiteSpace(importRequest.StorageDetail?.Type)) + { + storageDetailsParameterComponent.Part.Add(new ParameterComponent() { Name = TypeParamterName, Value = new FhirString(importRequest.StorageDetail.Type) }); + } + + paramters.Parameter.Add(storageDetailsParameterComponent); + + if (!string.IsNullOrEmpty(importRequest.Mode)) + { + paramters.Add(ModeParamterName, new FhirString(importRequest.Mode)); + } + + if (importRequest.Force) + { + paramters.Add(ForceParamterName, new FhirBoolean(true)); + } + + return paramters; + } + + public static ImportRequest ExtractImportRequest(this 
Parameters parameters) + { + ImportRequest importRequest = new ImportRequest(); + + if (parameters.TryGetStringValue(InputFormatParamterName, out string inputFormat)) + { + importRequest.InputFormat = inputFormat; + } + + if (parameters.TryGetUriValue(InputSourceParamterName, out Uri uriValue)) + { + importRequest.InputSource = uriValue; + } + + var inputResources = new List(); + foreach (ParameterComponent paramComponent in parameters.Get(InputParamterName)) + { + ParameterComponent typeParam = paramComponent.Part?.Where(p => TypeParamterName.Equals(p.Name, StringComparison.Ordinal))?.FirstOrDefault(); + ParameterComponent urlParam = paramComponent.Part?.Where(p => UrlParamterName.Equals(p.Name, StringComparison.Ordinal))?.FirstOrDefault(); + ParameterComponent etagParam = paramComponent.Part?.Where(p => EtagParamterName.Equals(p.Name, StringComparison.Ordinal))?.FirstOrDefault(); + + InputResource inputResource = new InputResource(); + + if (typeParam.TryGetStringValue(out string type)) + { + inputResource.Type = type; + } + + if (urlParam.TryGetUriValue(out Uri url)) + { + inputResource.Url = url; + } + + if (etagParam.TryGetStringValue(out string etag)) + { + inputResource.Etag = etag; + } + + inputResources.Add(inputResource); + } + + importRequest.Input = inputResources; + importRequest.StorageDetail = new ImportRequestStorageDetail(); + + ParameterComponent storageDetailsComponent = parameters.GetSingle(StorageDetailParamterName); + ParameterComponent storageTypeParam = storageDetailsComponent?.Part?.Where(p => TypeParamterName.Equals(p.Name, StringComparison.Ordinal))?.FirstOrDefault(); + if (storageTypeParam.TryGetStringValue(out string storageType)) + { + importRequest.StorageDetail.Type = storageType; + } + else + { + importRequest.StorageDetail.Type = DefaultStorageDetailType; + } + + if (parameters.TryGetStringValue(ModeParamterName, out string mode)) + { + importRequest.Mode = mode; + } + + if (parameters.TryGetBooleanValue(ForceParamterName, out bool 
force)) + { + importRequest.Force = force; + } + + return importRequest; + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/ParametersExtensions.cs b/src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/ParametersExtensions.cs new file mode 100644 index 0000000000..bc60e2460c --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Api/Features/Operations/ParametersExtensions.cs @@ -0,0 +1,61 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using Hl7.Fhir.Model; + +namespace Microsoft.Health.Fhir.Api.Features.Operations +{ + public static class ParametersExtensions + { + public static bool TryGetStringValue(this Parameters parameters, string name, out string stringValue) + { + Parameters.ParameterComponent param = parameters.GetSingle(name); + + return param.TryGetStringValue(out stringValue); + } + + public static bool TryGetUriValue(this Parameters parameters, string name, out Uri uriValue) + { + Parameters.ParameterComponent param = parameters.GetSingle(name); + + return param.TryGetUriValue(out uriValue); + } + + public static bool TryGetStringValue(this Parameters.ParameterComponent paramComponent, out string stringValue) + { + stringValue = paramComponent?.Value?.ToString(); + + return stringValue != null; + } + + public static bool TryGetBooleanValue(this Parameters.ParameterComponent paramComponent, out bool boolValue) + { + Element booleanElement = paramComponent?.Value; + + return bool.TryParse(booleanElement?.ToString(), out boolValue); + } + + public static bool TryGetUriValue(this Parameters.ParameterComponent paramComponent, out Uri uriValue) + { + Element uriElement = paramComponent?.Value; + 
+ return Uri.TryCreate(uriElement?.ToString(), UriKind.RelativeOrAbsolute, out uriValue); + } + + public static bool TryGetBooleanValue(this Parameters parameters, string name, out bool booleanValue) + { + Parameters.ParameterComponent param = parameters?.GetSingle(name); + + if (param == null) + { + booleanValue = false; + return false; + } + + return param.TryGetBooleanValue(out booleanValue); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Microsoft.Health.Fhir.Shared.Api.projitems b/src/Microsoft.Health.Fhir.Shared.Api/Microsoft.Health.Fhir.Shared.Api.projitems index ad7a671da8..8f4443387e 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Microsoft.Health.Fhir.Shared.Api.projitems +++ b/src/Microsoft.Health.Fhir.Shared.Api/Microsoft.Health.Fhir.Shared.Api.projitems @@ -10,6 +10,7 @@ + @@ -25,7 +26,9 @@ + + diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Modules/FhirModule.cs b/src/Microsoft.Health.Fhir.Shared.Api/Modules/FhirModule.cs index 943c27c9b4..2a7147ed65 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Modules/FhirModule.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Modules/FhirModule.cs @@ -102,6 +102,7 @@ ResourceElement SetMetadata(Resource resource, string versionId, DateTimeOffset services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); // Support for resolve() FhirPathCompiler.DefaultSymbolTable.AddFhirExtensions(); diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Modules/OperationsModule.cs b/src/Microsoft.Health.Fhir.Shared.Api/Modules/OperationsModule.cs index 5d2cf06015..4e987032a5 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Modules/OperationsModule.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Modules/OperationsModule.cs @@ -11,8 +11,10 @@ using Microsoft.Health.Fhir.Core.Features.Conformance; using Microsoft.Health.Fhir.Core.Features.Operations.Everything; using Microsoft.Health.Fhir.Core.Features.Operations.Export; +using 
Microsoft.Health.Fhir.Core.Features.Operations.Import; using Microsoft.Health.Fhir.Core.Features.Operations.Reindex; using Microsoft.Health.Fhir.Core.Messages.Search; +using Microsoft.Health.Fhir.Shared.Core.Features.Operations.Import; namespace Microsoft.Health.Fhir.Api.Modules { @@ -67,6 +69,26 @@ public void Load(IServiceCollection services) .AsService(); services.AddSingleton(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); } } } diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Registration/FhirServerServiceCollectionExtensions.cs b/src/Microsoft.Health.Fhir.Shared.Api/Registration/FhirServerServiceCollectionExtensions.cs index 4cea35c193..f22c294612 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Registration/FhirServerServiceCollectionExtensions.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Registration/FhirServerServiceCollectionExtensions.cs @@ -22,6 +22,7 @@ using Microsoft.Health.Fhir.Api.Features.ExceptionNotifications; using Microsoft.Health.Fhir.Api.Features.Exceptions; using Microsoft.Health.Fhir.Api.Features.Operations.Export; +using Microsoft.Health.Fhir.Api.Features.Operations.Import; using Microsoft.Health.Fhir.Api.Features.Operations.Reindex; using Microsoft.Health.Fhir.Api.Features.Routing; using Microsoft.Health.Fhir.Api.Features.Throttling; @@ -72,6 +73,8 @@ public static IFhirServerBuilder AddFhirServer( services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Operations.Export)); services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Operations.Reindex)); services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Operations.ConvertData)); + services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Operations.IntegrationDataStore)); + 
services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Operations.Import)); services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Audit)); services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Bundle)); services.AddSingleton(Options.Options.Create(fhirServerConfiguration.Throttling)); @@ -178,6 +181,8 @@ public Action Configure(Action next) app.UseMiddleware(); + app.UseInitialImportLock(); + // Throttling needs to come after Audit and ApiNotifications so we can audit it and track it for API metrics. // It should also be after authentication app.UseThrottling(); diff --git a/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs b/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs index a7352da469..59eaa3895e 100644 --- a/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs +++ b/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs @@ -397,6 +397,43 @@ public async Task ConvertDataAsync(Parameters parameters, CancellationTo return await response.Content.ReadAsStringAsync(cancellationToken); } + public async Task ImportAsync(Parameters parameters, CancellationToken cancellationToken = default) + { + string requestPath = "$import"; + using var message = new HttpRequestMessage(HttpMethod.Post, requestPath) + { + Content = CreateStringContent(parameters), + }; + + message.Headers.Add("Prefer", "respond-async"); + + using HttpResponseMessage response = await HttpClient.SendAsync(message, cancellationToken); + + await EnsureSuccessStatusCodeAsync(response); + + return response.Content.Headers.ContentLocation; + } + + public async Task CancelImport(Uri contentLocation, CancellationToken cancellationToken = default) + { + using var message = new HttpRequestMessage(HttpMethod.Delete, contentLocation); + message.Headers.Add("Prefer", "respond-async"); + + await HttpClient.SendAsync(message, cancellationToken); + } + + public async Task CheckImportAsync(Uri contentLocation, CancellationToken cancellationToken = default) + { + 
using var message = new HttpRequestMessage(HttpMethod.Get, contentLocation); + message.Headers.Add("Prefer", "respond-async"); + + var response = await HttpClient.SendAsync(message, cancellationToken); + + await EnsureSuccessStatusCodeAsync(response); + + return response; + } + public async Task> PostBundleAsync(Resource bundle, CancellationToken cancellationToken = default) { using var message = new HttpRequestMessage(HttpMethod.Post, string.Empty) diff --git a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Operations/Import/ImportErrorSerializerTests.cs b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Operations/Import/ImportErrorSerializerTests.cs new file mode 100644 index 0000000000..42969b126f --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Operations/Import/ImportErrorSerializerTests.cs @@ -0,0 +1,34 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using Hl7.Fhir.Model; +using Hl7.Fhir.Serialization; +using Microsoft.Health.Fhir.Shared.Core.Features.Operations.Import; +using Xunit; + +namespace Microsoft.Health.Fhir.Shared.Core.UnitTests.Features.Operations.Import +{ + public class ImportErrorSerializerTests + { + private readonly FhirJsonSerializer _jsonSerializer = new FhirJsonSerializer(); + + [Fact] + public void GivenImportProcessError_WhenSerialize_ValidStringShouldBeReturned() + { + string errorMessage = "Test Error"; + ImportErrorSerializer serializer = new ImportErrorSerializer(_jsonSerializer); + + string outcome = serializer.Serialize(10, new Exception(errorMessage)); + + FhirJsonParser parser = new FhirJsonParser(); + OperationOutcome operationOutcome = parser.Parse(outcome); + + Assert.Equal(OperationOutcome.IssueSeverity.Error, operationOutcome.Issue[0].Severity); + Assert.Equal($"Failed to process resource at line: {10}", operationOutcome.Issue[0].Diagnostics); + Assert.Equal(errorMessage, operationOutcome.Issue[0].Details.Text); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Microsoft.Health.Fhir.Shared.Core.UnitTests.projitems b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Microsoft.Health.Fhir.Shared.Core.UnitTests.projitems index ad976b10d7..28ebdead58 100644 --- a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Microsoft.Health.Fhir.Shared.Core.UnitTests.projitems +++ b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Microsoft.Health.Fhir.Shared.Core.UnitTests.projitems @@ -15,6 +15,7 @@ + diff --git a/src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportErrorSerializer.cs b/src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportErrorSerializer.cs new file mode 100644 index 0000000000..418a1ce352 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportErrorSerializer.cs @@ 
-0,0 +1,47 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using EnsureThat; +using Hl7.Fhir.Model; +using Hl7.Fhir.Serialization; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; + +namespace Microsoft.Health.Fhir.Shared.Core.Features.Operations.Import +{ + public class ImportErrorSerializer : IImportErrorSerializer + { + private readonly FhirJsonSerializer _fhirJsonSerializer; + + public ImportErrorSerializer(FhirJsonSerializer fhirJsonSerializer) + { + EnsureArg.IsNotNull(fhirJsonSerializer, nameof(fhirJsonSerializer)); + + _fhirJsonSerializer = fhirJsonSerializer; + } + + public string Serialize(long index, Exception ex) + { + EnsureArg.IsNotNull(ex, nameof(ex)); + + return Serialize(index, ex.Message); + } + + public string Serialize(long index, string errorMessage) + { + EnsureArg.IsNotNullOrEmpty(errorMessage, nameof(errorMessage)); + + var issue = new OperationOutcome.IssueComponent(); + issue.Severity = OperationOutcome.IssueSeverity.Error; + issue.Diagnostics = string.Format("Failed to process resource at line: {0}", index); + issue.Details = new CodeableConcept(); + issue.Details.Text = errorMessage; + OperationOutcome operationOutcome = new OperationOutcome(); + operationOutcome.Issue.Add(issue); + + return _fhirJsonSerializer.SerializeToString(operationOutcome); + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportResourceParser.cs b/src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportResourceParser.cs new file mode 100644 index 0000000000..2a9782a044 --- /dev/null +++ 
b/src/Microsoft.Health.Fhir.Shared.Core/Features/Operations/Import/ImportResourceParser.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Text; +using EnsureThat; +using Hl7.Fhir.Model; +using Hl7.Fhir.Serialization; +using Microsoft.Health.Fhir.Core.Extensions; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.Core.Features.Resources; +using Microsoft.Health.Fhir.Core.Models; +using Microsoft.IO; + +namespace Microsoft.Health.Fhir.Core.Features.Operations.Import +{ + public class ImportResourceParser : IImportResourceParser + { + internal static readonly Encoding ResourceEncoding = new UTF8Encoding(encoderShouldEmitUTF8Identifier: true); + + private FhirJsonParser _parser; + private IResourceWrapperFactory _resourceFactory; + private IResourceMetaPopulator _resourceMetaPopulator; + private RecyclableMemoryStreamManager _recyclableMemoryStreamManager; + private ICompressedRawResourceConverter _compressedRawResourceConverter; + + public ImportResourceParser(FhirJsonParser parser, IResourceWrapperFactory resourceFactory, IResourceMetaPopulator resourceMetaPopulator, ICompressedRawResourceConverter compressedRawResourceConverter) + { + EnsureArg.IsNotNull(parser, nameof(parser)); + EnsureArg.IsNotNull(resourceFactory, nameof(resourceFactory)); + EnsureArg.IsNotNull(compressedRawResourceConverter, nameof(compressedRawResourceConverter)); + + _parser = parser; + _resourceFactory = resourceFactory; + _resourceMetaPopulator = resourceMetaPopulator; + _compressedRawResourceConverter = 
compressedRawResourceConverter; + _recyclableMemoryStreamManager = new RecyclableMemoryStreamManager(); + } + + public ImportResource Parse(long id, long index, string rawContent) + { + Resource resource = _parser.Parse(rawContent); + CheckConditionalReferenceInResource(resource); + + _resourceMetaPopulator.Populate(id, resource); + + ResourceElement resourceElement = resource.ToResourceElement(); + ResourceWrapper resourceWapper = _resourceFactory.Create(resourceElement, false, true); + + return new ImportResource(id, index, resourceWapper) + { + CompressedStream = GenerateCompressedRawResource(resourceWapper.RawResource.Data), + }; + } + + private static void CheckConditionalReferenceInResource(Resource resource) + { + IEnumerable references = resource.GetAllChildren(); + foreach (ResourceReference reference in references) + { + if (string.IsNullOrWhiteSpace(reference.Reference)) + { + continue; + } + + if (reference.Reference.Contains("?", StringComparison.Ordinal)) + { + throw new NotSupportedException("Conditional reference not supported for initial import."); + } + } + } + + private Stream GenerateCompressedRawResource(string rawResource) + { + var outputStream = new RecyclableMemoryStream(_recyclableMemoryStreamManager); + _compressedRawResourceConverter.WriteCompressedRawResource(outputStream, rawResource); + + return outputStream; + } + } +} diff --git a/src/Microsoft.Health.Fhir.Shared.Core/Microsoft.Health.Fhir.Shared.Core.projitems b/src/Microsoft.Health.Fhir.Shared.Core/Microsoft.Health.Fhir.Shared.Core.projitems index f35ec11b85..9536bfc297 100644 --- a/src/Microsoft.Health.Fhir.Shared.Core/Microsoft.Health.Fhir.Shared.Core.projitems +++ b/src/Microsoft.Health.Fhir.Shared.Core/Microsoft.Health.Fhir.Shared.Core.projitems @@ -26,6 +26,8 @@ + + diff --git a/src/Microsoft.Health.Fhir.Shared.Tests/Samples.cs b/src/Microsoft.Health.Fhir.Shared.Tests/Samples.cs index 980017e674..95faa236ce 100644 --- a/src/Microsoft.Health.Fhir.Shared.Tests/Samples.cs +++ 
b/src/Microsoft.Health.Fhir.Shared.Tests/Samples.cs @@ -157,5 +157,10 @@ public static string GetXml(string fileName) { return EmbeddedResourceManager.GetStringContent(EmbeddedResourceSubNamespace, fileName, "xml"); } + + public static string GetNdJson(string fileName) + { + return EmbeddedResourceManager.GetStringContent(EmbeddedResourceSubNamespace, fileName, "ndjson"); + } } } diff --git a/src/Microsoft.Health.Fhir.Shared.Web/Startup.cs b/src/Microsoft.Health.Fhir.Shared.Web/Startup.cs index 68fc967fa1..75203d14b9 100644 --- a/src/Microsoft.Health.Fhir.Shared.Web/Startup.cs +++ b/src/Microsoft.Health.Fhir.Shared.Web/Startup.cs @@ -10,8 +10,12 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; +using Microsoft.Health.Extensions.DependencyInjection; +using Microsoft.Health.Fhir.Api.Features.BackgroundTaskService; using Microsoft.Health.Fhir.Azure; +using Microsoft.Health.Fhir.Core.Configs; using Microsoft.Health.SqlServer.Configs; +using Microsoft.Health.TaskManagement; namespace Microsoft.Health.Fhir.Web { @@ -33,6 +37,7 @@ public virtual void ConfigureServices(IServiceCollection services) .AddAzureExportDestinationClient() .AddAzureExportClientInitializer(Configuration) .AddContainerRegistryTokenProvider() + .AddAzureIntegrationDataStoreClient(Configuration) .AddConvertData() .AddMemberMatch(); @@ -49,6 +54,12 @@ public virtual void ConfigureServices(IServiceCollection services) }); } + // Set task hosting and related background service + if (bool.TryParse(Configuration["TaskHosting:Enabled"], out bool taskHostingsOn) && taskHostingsOn) + { + AddTaskHostingService(services); + } + /* The execution of IHostedServices depends on the order they are added to the dependency injection container, so we need to ensure that the schema is initialized before the background workers are started. 
@@ -78,6 +89,21 @@ need to ensure that the schema is initialized before the background workers are AddApplicationInsightsTelemetry(services); } + private void AddTaskHostingService(IServiceCollection services) + { + services.Add() + .Scoped() + .AsSelf(); + services.AddFactory>(); + + services.AddHostedService(); + services.Add() + .Scoped() + .AsSelf() + .AsImplementedInterfaces(); + services.Configure(options => Configuration.GetSection("TaskHosting").Bind(options)); + } + // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. public virtual void Configure(IApplicationBuilder app) { diff --git a/src/Microsoft.Health.Fhir.Shared.Web/appsettings.json b/src/Microsoft.Health.Fhir.Shared.Web/appsettings.json index 8cbc07ec57..0ca0e96df0 100644 --- a/src/Microsoft.Health.Fhir.Shared.Web/appsettings.json +++ b/src/Microsoft.Health.Fhir.Shared.Web/appsettings.json @@ -83,6 +83,13 @@ }, "Validate": { "CacheDurationInSeconds": 14400 + }, + "Import": { + "Enabled": false + }, + "IntegrationDataStore": { + "StorageAccountConnection": null, + "StorageAccountUri": null } }, "Audit": { @@ -161,6 +168,10 @@ } } }, + "TaskHosting": { + "Enabled": false, + "MaxRunningTaskCount": 1 + }, "ApplicationInsights": { "InstrumentationKey": "" }, diff --git a/src/Microsoft.Health.Fhir.Shared.Web/roles.json b/src/Microsoft.Health.Fhir.Shared.Web/roles.json index 7e132980e9..95c6ba47e9 100644 --- a/src/Microsoft.Health.Fhir.Shared.Web/roles.json +++ b/src/Microsoft.Health.Fhir.Shared.Web/roles.json @@ -34,6 +34,17 @@ "/" ] }, + { + "name": "globalImporter", + "dataActions": [ + "read", + "import" + ], + "notDataActions": [], + "scopes": [ + "/" + ] + }, { "name": "globalWriter", "dataActions": [ diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/SqlServerBulkImportOperationTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/SqlServerBulkImportOperationTests.cs new file mode 100644 index 
0000000000..6a39a22c09 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/SqlServerBulkImportOperationTests.cs @@ -0,0 +1,68 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using System.Reflection; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.Fhir.SqlServer.Features.Storage; +using Microsoft.Health.SqlServer.Features.Schema.Model; +using Xunit; + +namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Import +{ + public class SqlServerBulkImportOperationTests + { + [Fact] + public void GivenResourceRelatedTables_WhenNewIndexesAdded_BulkImportOperationShouldSupportNewIndexes() + { + Table[] resourceRelatedTables = new Table[] + { + VLatest.Resource, + VLatest.ResourceWriteClaim, + VLatest.CompartmentAssignment, + VLatest.DateTimeSearchParam, + VLatest.NumberSearchParam, + VLatest.QuantitySearchParam, + VLatest.ReferenceSearchParam, + VLatest.ReferenceTokenCompositeSearchParam, + VLatest.StringSearchParam, + VLatest.TokenDateTimeCompositeSearchParam, + VLatest.TokenNumberNumberCompositeSearchParam, + VLatest.TokenQuantityCompositeSearchParam, + VLatest.TokenSearchParam, + VLatest.TokenStringCompositeSearchParam, + VLatest.TokenText, + VLatest.TokenTokenCompositeSearchParam, + VLatest.UriSearchParam, + }; + + string[] excludeIndexNames = new string[] + { + "IX_Resource_ResourceTypeId_ResourceId_Version", + "IX_Resource_ResourceTypeId_ResourceId", + "IX_Resource_ResourceTypeId_ResourceSurrgateId", + }; + + string[] supportedIndexesNames = SqlImportOperation.OptionalIndexesForImport.Select(i => i.index.IndexName).ToArray(); + int expectedIndexesCount = 0; + 
foreach (Table table in resourceRelatedTables) + { + string[] indexNames = table.GetType().GetFields(BindingFlags.Instance | BindingFlags.NonPublic).Where(f => f.Name.StartsWith("IX_")).Select(f => f.Name).ToArray(); + foreach (string indexName in indexNames) + { + if (excludeIndexNames.Contains(indexName)) + { + continue; + } + + Assert.Contains(indexName, supportedIndexesNames); + expectedIndexesCount++; + } + } + + Assert.Equal(expectedIndexesCount, supportedIndexesNames.Length); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/UpsertSPRelationTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/UpsertSPRelationTests.cs new file mode 100644 index 0000000000..00152dbdf7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Import/UpsertSPRelationTests.cs @@ -0,0 +1,62 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using System.Reflection; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Xunit; + +namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Import +{ + public class UpsertSPRelationTests + { + /// 
    /// New parameters added to upsert resource stored procedure might impact bulk import operation. + /// Please contact import feature owner to review the change if this test case fails. 
+ /// + [Fact] + public void GivenUpsertResourceStoredProcedure_WhenNewParameterAdded_ThenBulkImportShouldSupportNewParameters() + { + string[] bulkImportSupportedParametersForResourceUpsert = new string[] + { + "command", + "baseResourceSurrogateId", + "resourceTypeId", + "resourceId", + "eTag", + "allowCreate", + "isDeleted", + "keepHistory", + "requestMethod", + "searchParamHash", + "rawResource", + "resourceWriteClaims", + "compartmentAssignments", + "referenceSearchParams", + "tokenSearchParams", + "tokenTextSearchParams", + "stringSearchParams", + "numberSearchParams", + "quantitySearchParams", + "uriSearchParams", + "dateTimeSearchParms", + "referenceTokenCompositeSearchParams", + "tokenTokenCompositeSearchParams", + "tokenDateTimeCompositeSearchParams", + "tokenQuantityCompositeSearchParams", + "tokenStringCompositeSearchParams", + "tokenNumberNumberCompositeSearchParams", + "isResourceChangeCaptureEnabled", + }; + MethodInfo methodInfo = typeof(VLatest.UpsertResourceProcedure).GetMethods().Where(m => m.Name.Equals("PopulateCommand")).OrderBy(m => -m.GetParameters().Count()).First(); + string[] upsertStoredProcedureParameters = methodInfo.GetParameters().Select(p => p.Name).ToArray(); + Assert.Equal(bulkImportSupportedParametersForResourceUpsert.Length, upsertStoredProcedureParameters.Length); + foreach (string parameterName in upsertStoredProcedureParameters) + { + Assert.Contains(parameterName, bulkImportSupportedParametersForResourceUpsert); + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Storage/CompressedRawResourceConverterTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Storage/CompressedRawResourceConverterTests.cs index ae9a8c6e0a..1979306cb1 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Storage/CompressedRawResourceConverterTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Storage/CompressedRawResourceConverterTests.cs @@ -18,11 +18,12 @@ public async 
Task ResourceWithCurrentEncoding_WhenDecoded_ProducesCorrectResult( { string data = "Hello 😊"; + CompressedRawResourceConverter converter = new CompressedRawResourceConverter(); using var stream = new MemoryStream(); - CompressedRawResourceConverter.WriteCompressedRawResource(stream, data); + converter.WriteCompressedRawResource(stream, data); stream.Seek(0, 0); - string actual = await CompressedRawResourceConverter.ReadCompressedRawResource(stream); + string actual = await converter.ReadCompressedRawResource(stream); Assert.Equal(data, actual); } @@ -31,6 +32,7 @@ public async Task ResourceWithLegacyEncoding_WhenDecoded_ProducesCorrectResult() { string data = "Hello 😊"; + CompressedRawResourceConverter converter = new CompressedRawResourceConverter(); using var stream = new MemoryStream(); using var gzipStream = new GZipStream(stream, CompressionMode.Compress); using var writer = new StreamWriter(gzipStream, CompressedRawResourceConverter.LegacyResourceEncoding); @@ -39,7 +41,7 @@ public async Task ResourceWithLegacyEncoding_WhenDecoded_ProducesCorrectResult() writer.Flush(); stream.Seek(0, 0); - string actual = await CompressedRawResourceConverter.ReadCompressedRawResource(stream); + string actual = await converter.ReadCompressedRawResource(stream); Assert.Equal(data, actual); } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/CompartmentAssignmentTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/CompartmentAssignmentTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..62330b5be4 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/CompartmentAssignmentTableBulkCopyDataGenerator.cs @@ -0,0 +1,71 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class CompartmentAssignmentTableBulkCopyDataGenerator : TableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkCompartmentAssignmentTableTypeV1Row> _generator; + + internal CompartmentAssignmentTableBulkCopyDataGenerator() + { + } + + public CompartmentAssignmentTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkCompartmentAssignmentTableTypeV1Row> generator) + { + EnsureArg.IsNotNull(generator, nameof(generator)); + + _generator = generator; + } + + internal override string TableName + { + get + { + return VLatest.CompartmentAssignment.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + foreach (var rowData in _generator.GenerateRows(new ResourceWrapper[] { input.Resource })) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, rowData); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkCompartmentAssignmentTableTypeV1Row rowData) + { + DataRow newRow = table.NewRow(); + + FillColumn(newRow, VLatest.CompartmentAssignment.ResourceTypeId.Metadata.Name, resourceTypeId); + FillColumn(newRow, VLatest.CompartmentAssignment.ResourceSurrogateId.Metadata.Name, resourceSurrogateId); + FillColumn(newRow, VLatest.CompartmentAssignment.CompartmentTypeId.Metadata.Name, rowData.CompartmentTypeId); + 
FillColumn(newRow, VLatest.CompartmentAssignment.ReferenceResourceId.Metadata.Name, rowData.ReferenceResourceId); + FillColumn(newRow, VLatest.CompartmentAssignment.IsHistory.Metadata.Name, false); + + table.Rows.Add(newRow); + } + + internal override void FillSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.CompartmentAssignment.ResourceTypeId.Metadata.Name, VLatest.CompartmentAssignment.ResourceTypeId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.CompartmentAssignment.ResourceSurrogateId.Metadata.Name, VLatest.CompartmentAssignment.ResourceSurrogateId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.CompartmentAssignment.CompartmentTypeId.Metadata.Name, VLatest.CompartmentAssignment.CompartmentTypeId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.CompartmentAssignment.ReferenceResourceId.Metadata.Name, VLatest.CompartmentAssignment.ReferenceResourceId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.CompartmentAssignment.IsHistory.Metadata.Name, VLatest.CompartmentAssignment.IsHistory.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/DateTimeSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/DateTimeSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..d0aed01bc2 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/DateTimeSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,68 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class DateTimeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkDateTimeSearchParamTableTypeV1Row> _searchParamGenerator; + + internal DateTimeSearchParamsTableBulkCopyDataGenerator() + { + } + + public DateTimeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkDateTimeSearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.DateTimeSearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkDateTimeSearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkDateTimeSearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, VLatest.DateTimeSearchParam.StartDateTime.Metadata.Name, searchParam.StartDateTime.DateTime); + 
FillColumn(newRow, VLatest.DateTimeSearchParam.EndDateTime.Metadata.Name, searchParam.EndDateTime.DateTime); + FillColumn(newRow, VLatest.DateTimeSearchParam.IsLongerThanADay.Metadata.Name, searchParam.IsLongerThanADay); + + table.Rows.Add(newRow); + } + + internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.DateTimeSearchParam.StartDateTime.Metadata.Name, VLatest.DateTimeSearchParam.StartDateTime.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.DateTimeSearchParam.EndDateTime.Metadata.Name, VLatest.DateTimeSearchParam.EndDateTime.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.DateTimeSearchParam.IsLongerThanADay.Metadata.Name, VLatest.DateTimeSearchParam.IsLongerThanADay.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/NumberSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/NumberSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..e5ffaa25f7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/NumberSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,68 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class NumberSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkNumberSearchParamTableTypeV1Row> _searchParamGenerator; + + internal NumberSearchParamsTableBulkCopyDataGenerator() + { + } + + public NumberSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkNumberSearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.NumberSearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkNumberSearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkNumberSearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, VLatest.NumberSearchParam.SingleValue.Metadata.Name, searchParam.SingleValue); + FillColumn(newRow, 
VLatest.NumberSearchParam.LowValue.Metadata.Name, searchParam.LowValue); + FillColumn(newRow, VLatest.NumberSearchParam.HighValue.Metadata.Name, searchParam.HighValue); + + table.Rows.Add(newRow); + } + + internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.NumberSearchParam.SingleValue.Metadata.Name, VLatest.NumberSearchParam.SingleValue.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.NumberSearchParam.LowValue.Metadata.Name, VLatest.NumberSearchParam.LowValue.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.NumberSearchParam.HighValue.Metadata.Name, VLatest.NumberSearchParam.HighValue.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/QuantitySearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/QuantitySearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..8b2e664adb --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/QuantitySearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,72 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class QuantitySearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkQuantitySearchParamTableTypeV1Row> _searchParamGenerator; + + internal QuantitySearchParamsTableBulkCopyDataGenerator() + { + } + + public QuantitySearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkQuantitySearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.QuantitySearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkQuantitySearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkQuantitySearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, VLatest.QuantitySearchParam.SystemId.Metadata.Name, searchParam.SystemId); + FillColumn(newRow, 
VLatest.QuantitySearchParam.QuantityCodeId.Metadata.Name, searchParam.QuantityCodeId); + FillColumn(newRow, VLatest.QuantitySearchParam.SingleValue.Metadata.Name, searchParam.SingleValue); + FillColumn(newRow, VLatest.QuantitySearchParam.LowValue.Metadata.Name, searchParam.LowValue); + FillColumn(newRow, VLatest.QuantitySearchParam.HighValue.Metadata.Name, searchParam.HighValue); + + table.Rows.Add(newRow); + } + + internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.QuantitySearchParam.SystemId.Metadata.Name, VLatest.QuantitySearchParam.SystemId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.QuantitySearchParam.QuantityCodeId.Metadata.Name, VLatest.QuantitySearchParam.QuantityCodeId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.QuantitySearchParam.SingleValue.Metadata.Name, VLatest.QuantitySearchParam.SingleValue.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.QuantitySearchParam.LowValue.Metadata.Name, VLatest.QuantitySearchParam.LowValue.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.QuantitySearchParam.HighValue.Metadata.Name, VLatest.QuantitySearchParam.HighValue.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..1c9266469f --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,70 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class ReferenceSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkReferenceSearchParamTableTypeV1Row> _searchParamGenerator; + + internal ReferenceSearchParamsTableBulkCopyDataGenerator() + { + } + + public ReferenceSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkReferenceSearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.ReferenceSearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkReferenceSearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkReferenceSearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + 
FillColumn(newRow, VLatest.ReferenceSearchParam.BaseUri.Metadata.Name, searchParam.BaseUri); + FillColumn(newRow, VLatest.ReferenceSearchParam.ReferenceResourceTypeId.Metadata.Name, searchParam.ReferenceResourceTypeId); + FillColumn(newRow, VLatest.ReferenceSearchParam.ReferenceResourceId.Metadata.Name, searchParam.ReferenceResourceId); + FillColumn(newRow, VLatest.ReferenceSearchParam.ReferenceResourceVersion.Metadata.Name, searchParam.ReferenceResourceVersion); + + table.Rows.Add(newRow); + } + + internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.ReferenceSearchParam.BaseUri.Metadata.Name, VLatest.ReferenceSearchParam.BaseUri.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.ReferenceSearchParam.ReferenceResourceTypeId.Metadata.Name, VLatest.ReferenceSearchParam.ReferenceResourceTypeId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.ReferenceSearchParam.ReferenceResourceId.Metadata.Name, VLatest.ReferenceSearchParam.ReferenceResourceId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.ReferenceSearchParam.ReferenceResourceVersion.Metadata.Name, VLatest.ReferenceSearchParam.ReferenceResourceVersion.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..0c47092f45 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,74 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. 
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the ReferenceTokenCompositeSearchParam table
    /// during initial-import of FHIR resources.
    /// </summary>
    internal class ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkReferenceTokenCompositeSearchParamTableTypeV1Row>
        // to match the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkReferenceTokenCompositeSearchParamTableTypeV1Row> _searchParamGenerator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator()
        {
        }

        public ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkReferenceTokenCompositeSearchParamTableTypeV1Row> searchParamGenerator)
        {
            EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator));

            _searchParamGenerator = searchParamGenerator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.ReferenceTokenCompositeSearchParam.TableName;
            }
        }

        /// <summary>Appends one row per search parameter extracted from <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkReferenceTokenCompositeSearchParamTableTypeV1Row> searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (BulkReferenceTokenCompositeSearchParamTableTypeV1Row searchParam in searchParams)
            {
                FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam);
            }
        }

        /// <summary>Appends a single search-parameter row; null component values become SQL NULL via FillColumn.</summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkReferenceTokenCompositeSearchParamTableTypeV1Row searchParam)
        {
            DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId);
            FillColumn(newRow, VLatest.ReferenceTokenCompositeSearchParam.BaseUri1.Metadata.Name, searchParam.BaseUri1);
            FillColumn(newRow, VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceTypeId1.Metadata.Name, searchParam.ReferenceResourceTypeId1);
            FillColumn(newRow, VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceId1.Metadata.Name, searchParam.ReferenceResourceId1);
            FillColumn(newRow, VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceVersion1.Metadata.Name, searchParam.ReferenceResourceVersion1);
            FillColumn(newRow, VLatest.ReferenceTokenCompositeSearchParam.SystemId2.Metadata.Name, searchParam.SystemId2);
            FillColumn(newRow, VLatest.ReferenceTokenCompositeSearchParam.Code2.Metadata.Name, searchParam.Code2);

            table.Rows.Add(newRow);
        }

        // Column order must match the FillDataTable order above and the target table type.
        internal override void FillSearchParamsSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.ReferenceTokenCompositeSearchParam.BaseUri1.Metadata.Name, VLatest.ReferenceTokenCompositeSearchParam.BaseUri1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceTypeId1.Metadata.Name, VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceTypeId1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceId1.Metadata.Name, VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceId1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceVersion1.Metadata.Name, VLatest.ReferenceTokenCompositeSearchParam.ReferenceResourceVersion1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ReferenceTokenCompositeSearchParam.SystemId2.Metadata.Name, VLatest.ReferenceTokenCompositeSearchParam.SystemId2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ReferenceTokenCompositeSearchParam.Code2.Metadata.Name, VLatest.ReferenceTokenCompositeSearchParam.Code2.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the Resource table itself (the raw
    /// resource payload plus its bookkeeping columns) during initial import.
    /// </summary>
    internal class ResourceTableBulkCopyDataGenerator : TableBulkCopyDataGenerator
    {
        // Imported resources are recorded as if written via PUT.
        private const string ImportMethod = "PUT";

        /// <summary>Target SQL table name.</summary>
        internal override string TableName => VLatest.Resource.TableName;

        /// <summary>Appends one Resource row for the wrapped resource in <paramref name="input"/>.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            FillDataTable(table, input.ResourceTypeId, input.Resource.ResourceId, input.ResourceSurrogateId, input.CompressedRawData, input.Resource.SearchParameterHash);
        }

        /// <summary>
        /// Appends a single Resource row. Imported rows are always version 1, current
        /// (not history), not deleted, with the raw-resource metadata flag set.
        /// </summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, string resourceId, long resourceSurrogateId, byte[] data, string searchParameterHash)
        {
            DataRow row = table.NewRow();

            FillColumn(row, VLatest.Resource.ResourceTypeId.Metadata.Name, resourceTypeId);
            FillColumn(row, VLatest.Resource.ResourceId.Metadata.Name, resourceId);
            FillColumn(row, VLatest.Resource.Version.Metadata.Name, 1);
            FillColumn(row, VLatest.Resource.IsHistory.Metadata.Name, false);
            FillColumn(row, VLatest.Resource.ResourceSurrogateId.Metadata.Name, resourceSurrogateId);
            FillColumn(row, VLatest.Resource.IsDeleted.Metadata.Name, false);
            FillColumn(row, VLatest.Resource.RequestMethod.Metadata.Name, ImportMethod);
            FillColumn(row, VLatest.Resource.RawResource.Metadata.Name, data);
            FillColumn(row, VLatest.Resource.IsRawResourceMetaSet.Metadata.Name, true);
            FillColumn(row, VLatest.Resource.SearchParamHash.Metadata.Name, searchParameterHash);

            table.Rows.Add(row);
        }

        // Column order must match the fill order above and the target table definition.
        internal override void FillSchema(DataTable table)
        {
            DataColumn typeColumn = new DataColumn(VLatest.Resource.ResourceTypeId.Metadata.Name, VLatest.Resource.ResourceTypeId.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(typeColumn);

            DataColumn idColumn = new DataColumn(VLatest.Resource.ResourceId.Metadata.Name, VLatest.Resource.ResourceId.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(idColumn);

            DataColumn versionColumn = new DataColumn(VLatest.Resource.Version.Metadata.Name, VLatest.Resource.Version.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(versionColumn);

            DataColumn historyColumn = new DataColumn(VLatest.Resource.IsHistory.Metadata.Name, VLatest.Resource.IsHistory.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(historyColumn);

            DataColumn surrogateIdColumn = new DataColumn(VLatest.Resource.ResourceSurrogateId.Metadata.Name, VLatest.Resource.ResourceSurrogateId.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(surrogateIdColumn);

            DataColumn deletedColumn = new DataColumn(VLatest.Resource.IsDeleted.Metadata.Name, VLatest.Resource.IsDeleted.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(deletedColumn);

            DataColumn methodColumn = new DataColumn(VLatest.Resource.RequestMethod.Metadata.Name, VLatest.Resource.RequestMethod.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(methodColumn);

            DataColumn rawColumn = new DataColumn(VLatest.Resource.RawResource.Metadata.Name, VLatest.Resource.RawResource.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(rawColumn);

            DataColumn metaSetColumn = new DataColumn(VLatest.Resource.IsRawResourceMetaSet.Metadata.Name, VLatest.Resource.IsRawResourceMetaSet.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(metaSetColumn);

            DataColumn hashColumn = new DataColumn(VLatest.Resource.SearchParamHash.Metadata.Name, VLatest.Resource.SearchParamHash.Metadata.SqlDbType.GetGeneralType());
            table.Columns.Add(hashColumn);
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the ResourceWriteClaim table
    /// (caller-identity claims recorded alongside each imported resource).
    /// </summary>
    internal class ResourceWriteClaimTableBulkCopyDataGenerator : TableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkResourceWriteClaimTableTypeV1Row> to match
        // the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkResourceWriteClaimTableTypeV1Row> _generator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal ResourceWriteClaimTableBulkCopyDataGenerator()
        {
        }

        public ResourceWriteClaimTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkResourceWriteClaimTableTypeV1Row> generator)
        {
            EnsureArg.IsNotNull(generator, nameof(generator));

            _generator = generator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.ResourceWriteClaim.TableName;
            }
        }

        /// <summary>Appends one row per write claim associated with <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkResourceWriteClaimTableTypeV1Row> claims = _generator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (var claim in claims)
            {
                FillDataTable(table, input.ResourceSurrogateId, claim);
            }
        }

        /// <summary>Appends a single claim row keyed by the resource surrogate id.</summary>
        internal static void FillDataTable(DataTable table, long resourceSurrogateId, BulkResourceWriteClaimTableTypeV1Row claim)
        {
            DataRow newRow = table.NewRow();

            FillColumn(newRow, VLatest.ResourceWriteClaim.ResourceSurrogateId.Metadata.Name, resourceSurrogateId);
            FillColumn(newRow, VLatest.ResourceWriteClaim.ClaimTypeId.Metadata.Name, claim.ClaimTypeId);
            FillColumn(newRow, VLatest.ResourceWriteClaim.ClaimValue.Metadata.Name, claim.ClaimValue);

            table.Rows.Add(newRow);
        }

        // Column order must match the fill order above and the target table definition.
        internal override void FillSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.ResourceWriteClaim.ResourceSurrogateId.Metadata.Name, VLatest.ResourceWriteClaim.ResourceSurrogateId.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ResourceWriteClaim.ClaimTypeId.Metadata.Name, VLatest.ResourceWriteClaim.ClaimTypeId.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.ResourceWriteClaim.ClaimValue.Metadata.Name, VLatest.ResourceWriteClaim.ClaimValue.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
+// ------------------------------------------------------------------------------------------------- + +using System.Data; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal abstract class SearchParamtersTableBulkCopyDataGenerator : TableBulkCopyDataGenerator + { + internal static readonly SmallIntColumn ResourceTypeId = new SmallIntColumn("ResourceTypeId"); + internal static readonly BigIntColumn ResourceSurrogateId = new BigIntColumn("ResourceSurrogateId"); + internal static readonly SmallIntColumn SearchParamId = new SmallIntColumn("SearchParamId"); + internal static readonly BitColumn IsHistory = new BitColumn("IsHistory"); + + internal override void FillSchema(DataTable table) + { + table.Columns.Add(new DataColumn(ResourceTypeId.Metadata.Name, ResourceTypeId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(ResourceSurrogateId.Metadata.Name, ResourceSurrogateId.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(SearchParamId.Metadata.Name, SearchParamId.Metadata.SqlDbType.GetGeneralType())); + FillSearchParamsSchema(table); + table.Columns.Add(new DataColumn(IsHistory.Metadata.Name, IsHistory.Metadata.SqlDbType.GetGeneralType())); + } + + internal abstract void FillSearchParamsSchema(DataTable table); + + internal static DataRow CreateNewRowWithCommonProperties(DataTable table, short resourceTypeId, long resourceSurrogateId, short searchParamId) + { + DataRow newRow = table.NewRow(); + newRow[ResourceTypeId.Metadata.Name] = resourceTypeId; + newRow[ResourceSurrogateId.Metadata.Name] = resourceSurrogateId; + newRow[SearchParamId.Metadata.Name] = searchParamId; + newRow[IsHistory.Metadata.Name] = false; + + return newRow; + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/StringSearchParamsTableBulkCopyDataGenerator.cs 
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the StringSearchParam table.
    /// </summary>
    internal class StringSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkStringSearchParamTableTypeV1Row> to match
        // the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkStringSearchParamTableTypeV1Row> _searchParamGenerator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal StringSearchParamsTableBulkCopyDataGenerator()
        {
        }

        public StringSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkStringSearchParamTableTypeV1Row> searchParamGenerator)
        {
            EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator));

            _searchParamGenerator = searchParamGenerator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.StringSearchParam.TableName;
            }
        }

        /// <summary>Appends one row per string search parameter extracted from <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkStringSearchParamTableTypeV1Row> searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (BulkStringSearchParamTableTypeV1Row searchParam in searchParams)
            {
                FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam);
            }
        }

        /// <summary>Appends a single string search-parameter row (Text plus optional TextOverflow).</summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkStringSearchParamTableTypeV1Row searchParam)
        {
            DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId);
            FillColumn(newRow, VLatest.StringSearchParam.Text.Metadata.Name, searchParam.Text);
            FillColumn(newRow, VLatest.StringSearchParam.TextOverflow.Metadata.Name, searchParam.TextOverflow);

            table.Rows.Add(newRow);
        }

        // Column order must match the FillDataTable order above and the target table type.
        internal override void FillSearchParamsSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.StringSearchParam.Text.Metadata.Name, VLatest.StringSearchParam.Text.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.StringSearchParam.TextOverflow.Metadata.Name, VLatest.StringSearchParam.TextOverflow.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Data; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal abstract class TableBulkCopyDataGenerator + { + internal abstract string TableName { get; } + + public DataTable GenerateDataTable() + { + DataTable table = new DataTable(TableName); + FillSchema(table); + + return table; + } + + internal abstract void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input); + + internal abstract void FillSchema(DataTable table); + + internal static void FillColumn(DataRow row, string name, object value) + { + row[name] = value == null ? DBNull.Value : value; + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..64563a7991 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,73 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the TokenDateTimeCompositeSearchParam table.
    /// </summary>
    internal class TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkTokenDateTimeCompositeSearchParamTableTypeV1Row>
        // to match the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenDateTimeCompositeSearchParamTableTypeV1Row> _searchParamGenerator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator()
        {
        }

        public TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenDateTimeCompositeSearchParamTableTypeV1Row> searchParamGenerator)
        {
            EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator));

            _searchParamGenerator = searchParamGenerator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.TokenDateTimeCompositeSearchParam.TableName;
            }
        }

        /// <summary>Appends one row per token/date-time composite extracted from <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkTokenDateTimeCompositeSearchParamTableTypeV1Row> searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (BulkTokenDateTimeCompositeSearchParamTableTypeV1Row searchParam in searchParams)
            {
                FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam);
            }
        }

        /// <summary>
        /// Appends a single composite row. The .DateTime accessor strips the offset from the
        /// start/end values before they are written to the table.
        /// </summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenDateTimeCompositeSearchParamTableTypeV1Row searchParam)
        {
            DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId);

            FillColumn(newRow, VLatest.TokenDateTimeCompositeSearchParam.SystemId1.Metadata.Name, searchParam.SystemId1);
            FillColumn(newRow, VLatest.TokenDateTimeCompositeSearchParam.Code1.Metadata.Name, searchParam.Code1);
            FillColumn(newRow, VLatest.TokenDateTimeCompositeSearchParam.StartDateTime2.Metadata.Name, searchParam.StartDateTime2.DateTime);
            FillColumn(newRow, VLatest.TokenDateTimeCompositeSearchParam.EndDateTime2.Metadata.Name, searchParam.EndDateTime2.DateTime);
            FillColumn(newRow, VLatest.TokenDateTimeCompositeSearchParam.IsLongerThanADay2.Metadata.Name, searchParam.IsLongerThanADay2);

            table.Rows.Add(newRow);
        }

        // Column order must match the FillDataTable order above and the target table type.
        internal override void FillSearchParamsSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.TokenDateTimeCompositeSearchParam.SystemId1.Metadata.Name, VLatest.TokenDateTimeCompositeSearchParam.SystemId1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenDateTimeCompositeSearchParam.Code1.Metadata.Name, VLatest.TokenDateTimeCompositeSearchParam.Code1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenDateTimeCompositeSearchParam.StartDateTime2.Metadata.Name, VLatest.TokenDateTimeCompositeSearchParam.StartDateTime2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenDateTimeCompositeSearchParam.EndDateTime2.Metadata.Name, VLatest.TokenDateTimeCompositeSearchParam.EndDateTime2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenDateTimeCompositeSearchParam.IsLongerThanADay2.Metadata.Name, VLatest.TokenDateTimeCompositeSearchParam.IsLongerThanADay2.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the TokenNumberNumberCompositeSearchParam table.
    /// </summary>
    internal class TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row>
        // to match the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row> _searchParamGenerator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator()
        {
        }

        public TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row> searchParamGenerator)
        {
            EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator));

            _searchParamGenerator = searchParamGenerator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.TokenNumberNumberCompositeSearchParam.TableName;
            }
        }

        /// <summary>Appends one row per token/number/number composite extracted from <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row> searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row searchParam in searchParams)
            {
                FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam);
            }
        }

        /// <summary>
        /// Appends a single composite row. Each number component carries either a single value
        /// or a low/high range; HasRange records which representation is populated.
        /// </summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row searchParam)
        {
            DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.SystemId1.Metadata.Name, searchParam.SystemId1);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.Code1.Metadata.Name, searchParam.Code1);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.SingleValue2.Metadata.Name, searchParam.SingleValue2);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.LowValue2.Metadata.Name, searchParam.LowValue2);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.HighValue2.Metadata.Name, searchParam.HighValue2);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.SingleValue3.Metadata.Name, searchParam.SingleValue3);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.LowValue3.Metadata.Name, searchParam.LowValue3);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.HighValue3.Metadata.Name, searchParam.HighValue3);
            FillColumn(newRow, VLatest.TokenNumberNumberCompositeSearchParam.HasRange.Metadata.Name, searchParam.HasRange);

            table.Rows.Add(newRow);
        }

        // Column order must match the FillDataTable order above and the target table type.
        internal override void FillSearchParamsSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.SystemId1.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.SystemId1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.Code1.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.Code1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.SingleValue2.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.SingleValue2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.LowValue2.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.LowValue2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.HighValue2.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.HighValue2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.SingleValue3.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.SingleValue3.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.LowValue3.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.LowValue3.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.HighValue3.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.HighValue3.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenNumberNumberCompositeSearchParam.HasRange.Metadata.Name, VLatest.TokenNumberNumberCompositeSearchParam.HasRange.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the TokenQuantityCompositeSearchParam table.
    /// </summary>
    internal class TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkTokenQuantityCompositeSearchParamTableTypeV1Row>
        // to match the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenQuantityCompositeSearchParamTableTypeV1Row> _searchParamGenerator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator()
        {
        }

        public TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenQuantityCompositeSearchParamTableTypeV1Row> searchParamGenerator)
        {
            EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator));

            _searchParamGenerator = searchParamGenerator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.TokenQuantityCompositeSearchParam.TableName;
            }
        }

        /// <summary>Appends one row per token/quantity composite extracted from <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkTokenQuantityCompositeSearchParamTableTypeV1Row> searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (BulkTokenQuantityCompositeSearchParamTableTypeV1Row searchParam in searchParams)
            {
                FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam);
            }
        }

        /// <summary>Appends a single composite row; the quantity carries either a single value or a low/high range.</summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenQuantityCompositeSearchParamTableTypeV1Row searchParam)
        {
            DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.SystemId1.Metadata.Name, searchParam.SystemId1);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.Code1.Metadata.Name, searchParam.Code1);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.SystemId2.Metadata.Name, searchParam.SystemId2);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.QuantityCodeId2.Metadata.Name, searchParam.QuantityCodeId2);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.SingleValue2.Metadata.Name, searchParam.SingleValue2);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.LowValue2.Metadata.Name, searchParam.LowValue2);
            FillColumn(newRow, VLatest.TokenQuantityCompositeSearchParam.HighValue2.Metadata.Name, searchParam.HighValue2);

            table.Rows.Add(newRow);
        }

        // Column order must match the FillDataTable order above and the target table type.
        internal override void FillSearchParamsSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.SystemId1.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.SystemId1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.Code1.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.Code1.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.SystemId2.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.SystemId2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.QuantityCodeId2.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.QuantityCodeId2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.SingleValue2.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.SingleValue2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.LowValue2.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.LowValue2.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenQuantityCompositeSearchParam.HighValue2.Metadata.Name, VLatest.TokenQuantityCompositeSearchParam.HighValue2.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
// -------------------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
// -------------------------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Data;
using EnsureThat;
using Microsoft.Health.Fhir.Core.Features.Persistence;
using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model;
using Microsoft.Health.SqlServer.Features.Schema.Model;

namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator
{
    /// <summary>
    /// Builds bulk-copy <see cref="DataTable"/> rows for the TokenSearchParam table.
    /// </summary>
    internal class TokenSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator
    {
        // NOTE(review): the generic arguments were garbled in the patch text; restored as
        // <IReadOnlyList<ResourceWrapper>, BulkTokenSearchParamTableTypeV1Row> to match
        // the GenerateRows(new ResourceWrapper[] { ... }) usage below — confirm against the repo.
        private ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenSearchParamTableTypeV1Row> _searchParamGenerator;

        // Parameterless constructor: used only when the schema is needed (no rows generated).
        internal TokenSearchParamsTableBulkCopyDataGenerator()
        {
        }

        public TokenSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator<IReadOnlyList<ResourceWrapper>, BulkTokenSearchParamTableTypeV1Row> searchParamGenerator)
        {
            EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator));

            _searchParamGenerator = searchParamGenerator;
        }

        /// <summary>Target SQL table name.</summary>
        internal override string TableName
        {
            get
            {
                return VLatest.TokenSearchParam.TableName;
            }
        }

        /// <summary>Appends one row per token search parameter extracted from <paramref name="input"/>'s resource.</summary>
        internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input)
        {
            EnsureArg.IsNotNull(table, nameof(table));
            EnsureArg.IsNotNull(input, nameof(input));

            IEnumerable<BulkTokenSearchParamTableTypeV1Row> searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource });

            foreach (BulkTokenSearchParamTableTypeV1Row searchParam in searchParams)
            {
                FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam);
            }
        }

        /// <summary>Appends a single token row (SystemId plus Code).</summary>
        internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenSearchParamTableTypeV1Row searchParam)
        {
            DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId);
            FillColumn(newRow, VLatest.TokenSearchParam.SystemId.Metadata.Name, searchParam.SystemId);
            FillColumn(newRow, VLatest.TokenSearchParam.Code.Metadata.Name, searchParam.Code);

            table.Rows.Add(newRow);
        }

        // Column order must match the FillDataTable order above and the target table type.
        internal override void FillSearchParamsSchema(DataTable table)
        {
            table.Columns.Add(new DataColumn(VLatest.TokenSearchParam.SystemId.Metadata.Name, VLatest.TokenSearchParam.SystemId.Metadata.SqlDbType.GetGeneralType()));
            table.Columns.Add(new DataColumn(VLatest.TokenSearchParam.Code.Metadata.Name, VLatest.TokenSearchParam.Code.Metadata.SqlDbType.GetGeneralType()));
        }
    }
}
+// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class TokenStringCompositeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkTokenStringCompositeSearchParamTableTypeV1Row> _searchParamGenerator; + + internal TokenStringCompositeSearchParamsTableBulkCopyDataGenerator() + { + } + + public TokenStringCompositeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkTokenStringCompositeSearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.TokenStringCompositeSearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkTokenStringCompositeSearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenStringCompositeSearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, 
VLatest.TokenStringCompositeSearchParam.SystemId1.Metadata.Name, searchParam.SystemId1); + FillColumn(newRow, VLatest.TokenStringCompositeSearchParam.Code1.Metadata.Name, searchParam.Code1); + FillColumn(newRow, VLatest.TokenStringCompositeSearchParam.Text2.Metadata.Name, searchParam.Text2); + FillColumn(newRow, VLatest.TokenStringCompositeSearchParam.TextOverflow2.Metadata.Name, searchParam.TextOverflow2); + + table.Rows.Add(newRow); + } + + internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.TokenStringCompositeSearchParam.SystemId1.Metadata.Name, VLatest.TokenStringCompositeSearchParam.SystemId1.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.TokenStringCompositeSearchParam.Code1.Metadata.Name, VLatest.TokenStringCompositeSearchParam.Code1.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.TokenStringCompositeSearchParam.Text2.Metadata.Name, VLatest.TokenStringCompositeSearchParam.Text2.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.TokenStringCompositeSearchParam.TextOverflow2.Metadata.Name, VLatest.TokenStringCompositeSearchParam.TextOverflow2.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTextSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTextSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..a8a89e5688 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTextSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,64 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class TokenTextSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkTokenTextTableTypeV1Row> _searchParamGenerator; + + internal TokenTextSearchParamsTableBulkCopyDataGenerator() + { + } + + public TokenTextSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkTokenTextTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.TokenText.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkTokenTextTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenTextTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, VLatest.TokenText.Text.Metadata.Name, searchParam.Text); + + table.Rows.Add(newRow); + } + + 
internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.TokenText.Text.Metadata.Name, VLatest.TokenText.Text.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..52b32e34d4 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,70 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkTokenTokenCompositeSearchParamTableTypeV1Row> _searchParamGenerator; + + internal TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator() + { + } + + public TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkTokenTokenCompositeSearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, 
nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.TokenTokenCompositeSearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkTokenTokenCompositeSearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkTokenTokenCompositeSearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, VLatest.TokenTokenCompositeSearchParam.SystemId1.Metadata.Name, searchParam.SystemId1); + FillColumn(newRow, VLatest.TokenTokenCompositeSearchParam.Code1.Metadata.Name, searchParam.Code1); + FillColumn(newRow, VLatest.TokenTokenCompositeSearchParam.SystemId2.Metadata.Name, searchParam.SystemId2); + FillColumn(newRow, VLatest.TokenTokenCompositeSearchParam.Code2.Metadata.Name, searchParam.Code2); + + table.Rows.Add(newRow); + } + + internal override void FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.TokenTokenCompositeSearchParam.SystemId1.Metadata.Name, VLatest.TokenTokenCompositeSearchParam.SystemId1.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.TokenTokenCompositeSearchParam.Code1.Metadata.Name, VLatest.TokenTokenCompositeSearchParam.Code1.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.TokenTokenCompositeSearchParam.SystemId2.Metadata.Name, 
VLatest.TokenTokenCompositeSearchParam.SystemId2.Metadata.SqlDbType.GetGeneralType())); + table.Columns.Add(new DataColumn(VLatest.TokenTokenCompositeSearchParam.Code2.Metadata.Name, VLatest.TokenTokenCompositeSearchParam.Code2.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/UriSearchParamsTableBulkCopyDataGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/UriSearchParamsTableBulkCopyDataGenerator.cs new file mode 100644 index 0000000000..15b8815806 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/DataGenerator/UriSearchParamsTableBulkCopyDataGenerator.cs @@ -0,0 +1,64 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator +{ + internal class UriSearchParamsTableBulkCopyDataGenerator : SearchParamtersTableBulkCopyDataGenerator + { + private ITableValuedParameterRowGenerator, BulkUriSearchParamTableTypeV1Row> _searchParamGenerator; + + internal UriSearchParamsTableBulkCopyDataGenerator() + { + } + + public UriSearchParamsTableBulkCopyDataGenerator(ITableValuedParameterRowGenerator, BulkUriSearchParamTableTypeV1Row> searchParamGenerator) + { + EnsureArg.IsNotNull(searchParamGenerator, nameof(searchParamGenerator)); + + _searchParamGenerator = searchParamGenerator; + } + + internal override string TableName + { + get + { + return VLatest.UriSearchParam.TableName; + } + } + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + EnsureArg.IsNotNull(table, nameof(table)); + EnsureArg.IsNotNull(input, nameof(input)); + + IEnumerable searchParams = _searchParamGenerator.GenerateRows(new ResourceWrapper[] { input.Resource }); + + foreach (BulkUriSearchParamTableTypeV1Row searchParam in searchParams) + { + FillDataTable(table, input.ResourceTypeId, input.ResourceSurrogateId, searchParam); + } + } + + internal static void FillDataTable(DataTable table, short resourceTypeId, long resourceSurrogateId, BulkUriSearchParamTableTypeV1Row searchParam) + { + DataRow newRow = CreateNewRowWithCommonProperties(table, resourceTypeId, resourceSurrogateId, searchParam.SearchParamId); + FillColumn(newRow, VLatest.UriSearchParam.Uri.Metadata.Name, searchParam.Uri); + + table.Rows.Add(newRow); + } + + internal override void 
FillSearchParamsSchema(DataTable table) + { + table.Columns.Add(new DataColumn(VLatest.UriSearchParam.Uri.Metadata.Name, VLatest.UriSearchParam.Uri.Metadata.SqlDbType.GetGeneralType())); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlBulkCopyDataWrapperFactory.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlBulkCopyDataWrapperFactory.cs new file mode 100644 index 0000000000..357cc7d393 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlBulkCopyDataWrapperFactory.cs @@ -0,0 +1,25 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Threading.Tasks; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + public interface ISqlBulkCopyDataWrapperFactory + { + /// + /// Create sql bulk copy wrapper, extract necessary information. + /// + /// Import Resource + /// Bulk copy wrapper + public SqlBulkCopyDataWrapper CreateSqlBulkCopyDataWrapper(ImportResource resource); + + /// + /// Ensure the sql db initialized. + /// + public Task EnsureInitializedAsync(); + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlImportOperation.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlImportOperation.cs new file mode 100644 index 0000000000..4f460da65b --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ISqlImportOperation.cs @@ -0,0 +1,38 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + public interface ISqlImportOperation + { + /// + /// Clean resources and params by resource type and sequence id range. + /// + /// FHIR Resource Type + /// Begin sequence id. + /// End sequence id. + /// Cancellation Token + public Task CleanBatchResourceAsync(string resourceType, long beginSequenceId, long endSequenceId, CancellationToken cancellationToken); + + /// + /// Copy table to data store. + /// + /// Input data table. + /// Cancellation Token + public Task BulkCopyDataAsync(DataTable dataTable, CancellationToken cancellationToken); + + /// + /// Merge resources to resource table. + /// + /// Input resources content. + /// Cancellation Token + public Task> BulkMergeResourceAsync(IEnumerable resources, CancellationToken cancellationToken); + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportResourceSqlExtentions.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportResourceSqlExtentions.cs new file mode 100644 index 0000000000..a67144b273 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportResourceSqlExtentions.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + internal static class ImportResourceSqlExtentions + { + internal static BulkImportResourceTypeV1Row ExtractBulkImportResourceTypeV1Row(this ImportResource importResource, short resourceTypeId) + { + return new BulkImportResourceTypeV1Row(resourceTypeId, importResource.Resource.ResourceId, 0, false, importResource.Id, false, "POST", importResource.CompressedStream, true, importResource.Resource.SearchParameterHash); + } + + internal static bool ContainsError(this ImportResource importResource) + { + return !string.IsNullOrEmpty(importResource.ImportError); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapper.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapper.cs new file mode 100644 index 0000000000..a7c64fd369 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapper.cs @@ -0,0 +1,64 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.Fhir.SqlServer.Features.Storage; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + public class SqlBulkCopyDataWrapper : IEquatable + { + /// + /// FHIR resource metadata for SQL + /// + internal ResourceMetadata Metadata { get; set; } + + /// + /// Resource type id for sql mapping + /// + public short ResourceTypeId { get; set; } + + /// + /// Assigned resource surrogate id + /// + public long ResourceSurrogateId { get; set; } + + /// + /// Extracted resource wrapper + /// + public ResourceWrapper Resource { get; set; } + + /// + /// Compressed FHIR raw data + /// +#pragma warning disable CA1819 + public byte[] CompressedRawData { get; set; } +#pragma warning restore CA1819 + + /// + /// Index for the resource in file + /// + public long Index { get; set; } + + /// + /// Import resource for sql operation + /// + internal BulkImportResourceTypeV1Row BulkImportResource { get; set; } + + public bool Equals(SqlBulkCopyDataWrapper other) + { + return ResourceSurrogateId.Equals(other.ResourceSurrogateId); + } + + public override bool Equals(object obj) + { + return Equals(obj as SqlBulkCopyDataWrapper); + } + + public override int GetHashCode() => ResourceSurrogateId.GetHashCode(); + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapperFactory.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapperFactory.cs new file mode 100644 index 0000000000..b5a42df79d --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlBulkCopyDataWrapperFactory.cs @@ -0,0 +1,54 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright 
(c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Storage; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + internal class SqlBulkCopyDataWrapperFactory : ISqlBulkCopyDataWrapperFactory + { + private SqlServerFhirModel _model; + private SearchParameterToSearchValueTypeMap _searchParameterTypeMap; + + public SqlBulkCopyDataWrapperFactory(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) + { + EnsureArg.IsNotNull(model, nameof(model)); + EnsureArg.IsNotNull(searchParameterTypeMap, nameof(searchParameterTypeMap)); + + _model = model; + _searchParameterTypeMap = searchParameterTypeMap; + } + + public SqlBulkCopyDataWrapper CreateSqlBulkCopyDataWrapper(ImportResource resource) + { + var resourceMetadata = new ResourceMetadata( + resource.Resource.CompartmentIndices, + resource.Resource.SearchIndices?.ToLookup(e => _searchParameterTypeMap.GetSearchValueType(e)), + resource.Resource.LastModifiedClaims); + short resourceTypeId = _model.GetResourceTypeId(resource.Resource.ResourceTypeName); + + resource.CompressedStream.Seek(0, 0); + + return new SqlBulkCopyDataWrapper() + { + Metadata = resourceMetadata, + ResourceTypeId = resourceTypeId, + Resource = resource.Resource, + ResourceSurrogateId = resource.Id, + Index = resource.Index, + BulkImportResource = resource.ExtractBulkImportResourceTypeV1Row(resourceTypeId), + }; + } + + public async Task EnsureInitializedAsync() + { + await _model.EnsureInitialized(); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlDbTypeExtensions.cs 
b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlDbTypeExtensions.cs new file mode 100644 index 0000000000..12c561394e --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlDbTypeExtensions.cs @@ -0,0 +1,50 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Data; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + public static class SqlDbTypeExtensions + { + /// + /// Mapping between sql db type to c# paramitive type + /// + internal static readonly Dictionary EquivalentSystemType = new Dictionary + { + { SqlDbType.BigInt, typeof(long) }, + { SqlDbType.Binary, typeof(byte[]) }, + { SqlDbType.Bit, typeof(bool) }, + { SqlDbType.Char, typeof(string) }, + { SqlDbType.Date, typeof(DateTime) }, + { SqlDbType.DateTime, typeof(DateTime) }, + { SqlDbType.DateTime2, typeof(DateTime) }, + { SqlDbType.DateTimeOffset, typeof(DateTimeOffset) }, + { SqlDbType.Decimal, typeof(decimal) }, + { SqlDbType.Float, typeof(double) }, + { SqlDbType.Image, typeof(byte[]) }, + { SqlDbType.Int, typeof(int) }, + { SqlDbType.Money, typeof(decimal) }, + { SqlDbType.NChar, typeof(string) }, + { SqlDbType.NVarChar, typeof(string) }, + { SqlDbType.Real, typeof(float) }, + { SqlDbType.SmallDateTime, typeof(DateTime) }, + { SqlDbType.SmallInt, typeof(short) }, + { SqlDbType.SmallMoney, typeof(decimal) }, + { SqlDbType.Time, typeof(TimeSpan) }, // SQL2008+ + { SqlDbType.TinyInt, typeof(byte) }, + { SqlDbType.UniqueIdentifier, typeof(Guid) }, + { SqlDbType.VarBinary, typeof(byte[]) }, + { SqlDbType.VarChar, typeof(string) }, + }; + + public static Type 
GetGeneralType(this SqlDbType type) + { + return EquivalentSystemType[type]; + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImportOperation.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImportOperation.cs new file mode 100644 index 0000000000..bc8d59057d --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImportOperation.cs @@ -0,0 +1,388 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Client; +using Microsoft.Health.SqlServer.Features.Schema.Model; +using Microsoft.IO; +using Index = Microsoft.Health.SqlServer.Features.Schema.Model.Index; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Storage +{ + public class SqlImportOperation : ISqlImportOperation, IImportOrchestratorTaskDataStoreOperation + { + private SqlConnectionWrapperFactory _sqlConnectionWrapperFactory; + private ISqlServerTransientFaultRetryPolicyFactory _sqlServerTransientFaultRetryPolicyFactory; + private SqlServerFhirModel _model; + private readonly RecyclableMemoryStreamManager _memoryStreamManager; + private readonly ImportTaskConfiguration 
_importTaskConfiguration; + private ILogger _logger; + + public SqlImportOperation( + SqlConnectionWrapperFactory sqlConnectionWrapperFactory, + ISqlServerTransientFaultRetryPolicyFactory sqlServerTransientFaultRetryPolicyFactory, + SqlServerFhirModel model, + IOptions operationsConfig, + ILogger logger) + { + EnsureArg.IsNotNull(sqlConnectionWrapperFactory, nameof(sqlConnectionWrapperFactory)); + EnsureArg.IsNotNull(sqlServerTransientFaultRetryPolicyFactory, nameof(sqlServerTransientFaultRetryPolicyFactory)); + EnsureArg.IsNotNull(model, nameof(model)); + EnsureArg.IsNotNull(operationsConfig, nameof(operationsConfig)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _sqlConnectionWrapperFactory = sqlConnectionWrapperFactory; + _sqlServerTransientFaultRetryPolicyFactory = sqlServerTransientFaultRetryPolicyFactory; + _model = model; + _importTaskConfiguration = operationsConfig.Value.Import; + _logger = logger; + + _memoryStreamManager = new RecyclableMemoryStreamManager(); + } + + public static IReadOnlyList<(Table table, Index index)> OptionalUniqueIndexesForImport { get; } = + new List<(Table table, Index index)>() + { + (VLatest.Resource, VLatest.Resource.IX_Resource_ResourceTypeId_ResourceId), + (VLatest.Resource, VLatest.Resource.IX_Resource_ResourceTypeId_ResourceSurrgateId), + }; + + public static IReadOnlyList<(Table table, Index index)> OptionalIndexesForImport { get; } = + new List<(Table table, Index index)>() + { + (VLatest.Resource, VLatest.Resource.IX_Resource_ResourceSurrogateId), + (VLatest.CompartmentAssignment, VLatest.CompartmentAssignment.IX_CompartmentAssignment_CompartmentTypeId_ReferenceResourceId), + (VLatest.DateTimeSearchParam, VLatest.DateTimeSearchParam.IX_DateTimeSearchParam_SearchParamId_EndDateTime_StartDateTime), + (VLatest.DateTimeSearchParam, VLatest.DateTimeSearchParam.IX_DateTimeSearchParam_SearchParamId_EndDateTime_StartDateTime_Long), + (VLatest.DateTimeSearchParam, 
VLatest.DateTimeSearchParam.IX_DateTimeSearchParam_SearchParamId_StartDateTime_EndDateTime), + (VLatest.DateTimeSearchParam, VLatest.DateTimeSearchParam.IX_DateTimeSearchParam_SearchParamId_StartDateTime_EndDateTime_Long), + (VLatest.NumberSearchParam, VLatest.NumberSearchParam.IX_NumberSearchParam_SearchParamId_HighValue_LowValue), + (VLatest.NumberSearchParam, VLatest.NumberSearchParam.IX_NumberSearchParam_SearchParamId_LowValue_HighValue), + (VLatest.NumberSearchParam, VLatest.NumberSearchParam.IX_NumberSearchParam_SearchParamId_SingleValue), + (VLatest.QuantitySearchParam, VLatest.QuantitySearchParam.IX_QuantitySearchParam_SearchParamId_QuantityCodeId_HighValue_LowValue), + (VLatest.QuantitySearchParam, VLatest.QuantitySearchParam.IX_QuantitySearchParam_SearchParamId_QuantityCodeId_LowValue_HighValue), + (VLatest.QuantitySearchParam, VLatest.QuantitySearchParam.IX_QuantitySearchParam_SearchParamId_QuantityCodeId_SingleValue), + (VLatest.ReferenceSearchParam, VLatest.ReferenceSearchParam.IX_ReferenceSearchParam_SearchParamId_ReferenceResourceTypeId_ReferenceResourceId_BaseUri_ReferenceResourceVersion), + (VLatest.ReferenceTokenCompositeSearchParam, VLatest.ReferenceTokenCompositeSearchParam.IX_ReferenceTokenCompositeSearchParam_ReferenceResourceId1_Code2), + (VLatest.StringSearchParam, VLatest.StringSearchParam.IX_StringSearchParam_SearchParamId_Text), + (VLatest.StringSearchParam, VLatest.StringSearchParam.IX_StringSearchParam_SearchParamId_TextWithOverflow), + (VLatest.TokenDateTimeCompositeSearchParam, VLatest.TokenDateTimeCompositeSearchParam.IX_TokenDateTimeCompositeSearchParam_Code1_EndDateTime2_StartDateTime2), + (VLatest.TokenDateTimeCompositeSearchParam, VLatest.TokenDateTimeCompositeSearchParam.IX_TokenDateTimeCompositeSearchParam_Code1_EndDateTime2_StartDateTime2_Long), + (VLatest.TokenDateTimeCompositeSearchParam, VLatest.TokenDateTimeCompositeSearchParam.IX_TokenDateTimeCompositeSearchParam_Code1_StartDateTime2_EndDateTime2), + 
(VLatest.TokenDateTimeCompositeSearchParam, VLatest.TokenDateTimeCompositeSearchParam.IX_TokenDateTimeCompositeSearchParam_Code1_StartDateTime2_EndDateTime2_Long), + (VLatest.TokenNumberNumberCompositeSearchParam, VLatest.TokenNumberNumberCompositeSearchParam.IX_TokenNumberNumberCompositeSearchParam_SearchParamId_Code1_LowValue2_HighValue2_LowValue3_HighValue3), + (VLatest.TokenNumberNumberCompositeSearchParam, VLatest.TokenNumberNumberCompositeSearchParam.IX_TokenNumberNumberCompositeSearchParam_SearchParamId_Code1_Text2), + (VLatest.TokenQuantityCompositeSearchParam, VLatest.TokenQuantityCompositeSearchParam.IX_TokenQuantityCompositeSearchParam_SearchParamId_Code1_QuantityCodeId2_HighValue2_LowValue2), + (VLatest.TokenQuantityCompositeSearchParam, VLatest.TokenQuantityCompositeSearchParam.IX_TokenQuantityCompositeSearchParam_SearchParamId_Code1_QuantityCodeId2_LowValue2_HighValue2), + (VLatest.TokenQuantityCompositeSearchParam, VLatest.TokenQuantityCompositeSearchParam.IX_TokenQuantityCompositeSearchParam_SearchParamId_Code1_QuantityCodeId2_SingleValue2), + (VLatest.TokenSearchParam, VLatest.TokenSearchParam.IX_TokenSeachParam_SearchParamId_Code_SystemId), + (VLatest.TokenStringCompositeSearchParam, VLatest.TokenStringCompositeSearchParam.IX_TokenStringCompositeSearchParam_SearchParamId_Code1_Text2), + (VLatest.TokenStringCompositeSearchParam, VLatest.TokenStringCompositeSearchParam.IX_TokenStringCompositeSearchParam_SearchParamId_Code1_Text2WithOverflow), + (VLatest.TokenText, VLatest.TokenText.IX_TokenText_SearchParamId_Text), + (VLatest.TokenTokenCompositeSearchParam, VLatest.TokenTokenCompositeSearchParam.IX_TokenTokenCompositeSearchParam_Code1_Code2), + (VLatest.UriSearchParam, VLatest.UriSearchParam.IX_UriSearchParam_SearchParamId_Uri), + + // ResourceWriteClaim Table - No unclustered index + }; + + public static IReadOnlyList SearchParameterTables { get; } = + new List() + { + VLatest.CompartmentAssignment.TableName, + 
VLatest.ReferenceSearchParam.TableName, + VLatest.TokenSearchParam.TableName, + VLatest.TokenText.TableName, + VLatest.StringSearchParam.TableName, + VLatest.UriSearchParam.TableName, + VLatest.NumberSearchParam.TableName, + VLatest.QuantitySearchParam.TableName, + VLatest.DateTimeSearchParam.TableName, + VLatest.ReferenceTokenCompositeSearchParam.TableName, + VLatest.TokenTokenCompositeSearchParam.TableName, + VLatest.TokenDateTimeCompositeSearchParam.TableName, + VLatest.TokenQuantityCompositeSearchParam.TableName, + VLatest.TokenStringCompositeSearchParam.TableName, + VLatest.TokenNumberNumberCompositeSearchParam.TableName, + }; + + public async Task BulkCopyDataAsync(DataTable dataTable, CancellationToken cancellationToken) + { + using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConnectionWrapper.SqlConnection, SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.UseInternalTransaction | SqlBulkCopyOptions.KeepNulls, null)) + { + bulkCopy.DestinationTableName = dataTable.TableName; + bulkCopy.BatchSize = dataTable.Rows.Count; + + try + { + await _sqlServerTransientFaultRetryPolicyFactory.Create().ExecuteAsync( + async () => + { + bulkCopy.BulkCopyTimeout = _importTaskConfiguration.SqlBulkOperationTimeoutInSec; + await bulkCopy.WriteToServerAsync(dataTable.CreateDataReader()); + }); + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Failed to bulk copy data."); + + throw; + } + } + } + + public async Task> BulkMergeResourceAsync(IEnumerable resources, CancellationToken cancellationToken) + { + List importedSurrogatedId = new List(); + + // Make sure there's no dup in this batch + resources = resources.GroupBy(r => (r.ResourceTypeId, r.Resource.ResourceId)).Select(r => r.First()); + IEnumerable inputResources = resources.Select(r => r.BulkImportResource); + + using (SqlConnectionWrapper sqlConnectionWrapper 
= await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + VLatest.BulkMergeResource.PopulateCommand(sqlCommandWrapper, inputResources); + sqlCommandWrapper.CommandTimeout = _importTaskConfiguration.SqlBulkOperationTimeoutInSec; + + var sqlDataReader = await sqlCommandWrapper.ExecuteReaderAsync(cancellationToken); + + while (await sqlDataReader.ReadAsync(cancellationToken)) + { + long surrogatedId = sqlDataReader.GetInt64(0); + importedSurrogatedId.Add(surrogatedId); + } + + return resources.Where(r => importedSurrogatedId.Contains(r.ResourceSurrogateId)); + } + catch (SqlException sqlEx) + { + _logger.LogError(sqlEx, "Failed to merge resources. " + sqlEx.Message); + + throw; + } + } + } + + public async Task CleanBatchResourceAsync(string resourceType, long beginSequenceId, long endSequenceId, CancellationToken cancellationToken) + { + short resourceTypeId = _model.GetResourceTypeId(resourceType); + + await BatchDeleteResourcesInternalAsync(beginSequenceId, endSequenceId, resourceTypeId, _importTaskConfiguration.SqlCleanResourceBatchSize, cancellationToken); + await BatchDeleteResourceWriteClaimsInternalAsync(beginSequenceId, endSequenceId, _importTaskConfiguration.SqlCleanResourceBatchSize, cancellationToken); + + foreach (var tableName in SearchParameterTables.ToArray()) + { + await BatchDeleteResourceParamsInternalAsync(tableName, beginSequenceId, endSequenceId, resourceTypeId, _importTaskConfiguration.SqlCleanResourceBatchSize, cancellationToken); + } + } + + public async Task PreprocessAsync(CancellationToken cancellationToken) + { + // Not disable index by default + if (_importTaskConfiguration.DisableOptionalIndexesForImport || _importTaskConfiguration.DisableUniqueOptionalIndexesForImport) + { + List<(string tableName, string indexName)> indexesNeedDisable = new List<(string tableName, string indexName)>(); + + if 
(_importTaskConfiguration.DisableOptionalIndexesForImport) + { + indexesNeedDisable.AddRange(OptionalIndexesForImport.Select(indexRecord => (indexRecord.table.TableName, indexRecord.index.IndexName))); + } + + if (_importTaskConfiguration.DisableUniqueOptionalIndexesForImport) + { + indexesNeedDisable.AddRange(OptionalUniqueIndexesForImport.Select(indexRecord => (indexRecord.table.TableName, indexRecord.index.IndexName))); + } + + foreach (var index in indexesNeedDisable) + { + using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + VLatest.DisableIndex.PopulateCommand(sqlCommandWrapper, index.tableName, index.indexName); + await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken); + } + catch (SqlException sqlEx) + { + _logger.LogInformation(sqlEx, "Failed to disable indexes."); + + throw; + } + } + } + } + } + + public async Task PostprocessAsync(CancellationToken cancellationToken) + { + // Not rerebuild index by default + if (_importTaskConfiguration.DisableOptionalIndexesForImport || _importTaskConfiguration.DisableUniqueOptionalIndexesForImport) + { + List<(string tableName, string indexName)> indexesNeedRebuild = new List<(string tableName, string indexName)>(); + + if (_importTaskConfiguration.DisableOptionalIndexesForImport) + { + indexesNeedRebuild.AddRange(OptionalIndexesForImport.Select(indexRecord => (indexRecord.table.TableName, indexRecord.index.IndexName))); + } + + if (_importTaskConfiguration.DisableUniqueOptionalIndexesForImport) + { + indexesNeedRebuild.AddRange(OptionalUniqueIndexesForImport.Select(indexRecord => (indexRecord.table.TableName, indexRecord.index.IndexName))); + } + + List> runningTasks = new List>(); + HashSet runningRebuildTables = new HashSet(); + + // rebuild index operation on same table would be blocked, try to parallel run 
rebuild operation on different table. + while (indexesNeedRebuild.Count > 0) + { + // if all remine indexes' table has some running rebuild operation, need to wait until at least one operation completed. + while (indexesNeedRebuild.All(ix => runningRebuildTables.Contains(ix.tableName)) || runningTasks.Count >= _importTaskConfiguration.SqlMaxRebuildIndexOperationConcurrentCount) + { + Task<(string tableName, string indexName)> completedTask = await Task.WhenAny(runningTasks.ToArray()); + (string tableName, string indexName) indexRebuilt = await completedTask; + + runningRebuildTables.Remove(indexRebuilt.tableName); + runningTasks.Remove(completedTask); + } + + (string tableName, string indexName) nextIndex = indexesNeedRebuild.Where(ix => !runningRebuildTables.Contains(ix.tableName)).First(); + indexesNeedRebuild.Remove(nextIndex); + runningRebuildTables.Add(nextIndex.tableName); + runningTasks.Add(ExecuteRebuildIndexTaskAsync(nextIndex.tableName, nextIndex.indexName, cancellationToken)); + } + + await Task.WhenAll(runningTasks.ToArray()); + } + } + + private async Task<(string tableName, string indexName)> ExecuteRebuildIndexTaskAsync(string tableName, string indexName, CancellationToken cancellationToken) + { + using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + sqlCommandWrapper.CommandTimeout = _importTaskConfiguration.SqlLongRunningOperationTimeoutInSec; + + VLatest.RebuildIndex.PopulateCommand(sqlCommandWrapper, tableName, indexName); + await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken); + + return (tableName, indexName); + } + catch (SqlException sqlEx) + { + _logger.LogInformation(sqlEx, "Failed to rebuild indexes."); + + throw; + } + } + } + + private async Task BatchDeleteResourcesInternalAsync(long beginSequenceId, long endSequenceId, short 
resourceTypeId, int batchSize, CancellationToken cancellationToken) + { + while (true) + { + using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + sqlCommandWrapper.CommandTimeout = _importTaskConfiguration.SqlBulkOperationTimeoutInSec; + + VLatest.BatchDeleteResources.PopulateCommand(sqlCommandWrapper, resourceTypeId, beginSequenceId, endSequenceId, batchSize); + int impactRows = await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken); + + if (impactRows < batchSize) + { + return; + } + } + catch (SqlException sqlEx) + { + _logger.LogInformation(sqlEx, "Failed batch delete Resource."); + + throw; + } + } + } + } + + private async Task BatchDeleteResourceWriteClaimsInternalAsync(long beginSequenceId, long endSequenceId, int batchSize, CancellationToken cancellationToken) + { + while (true) + { + using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + sqlCommandWrapper.CommandTimeout = _importTaskConfiguration.SqlBulkOperationTimeoutInSec; + + VLatest.BatchDeleteResourceWriteClaims.PopulateCommand(sqlCommandWrapper, beginSequenceId, endSequenceId, batchSize); + int impactRows = await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken); + + if (impactRows < batchSize) + { + return; + } + } + catch (SqlException sqlEx) + { + _logger.LogInformation(sqlEx, "Failed batch delete ResourceWriteClaims."); + + throw; + } + } + } + } + + private async Task BatchDeleteResourceParamsInternalAsync(string tableName, long beginSequenceId, long endSequenceId, short resourceTypeId, int batchSize, CancellationToken cancellationToken) + { + while (true) + { + using (SqlConnectionWrapper 
sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + sqlCommandWrapper.CommandTimeout = _importTaskConfiguration.SqlBulkOperationTimeoutInSec; + + VLatest.BatchDeleteResourceParams.PopulateCommand(sqlCommandWrapper, tableName, resourceTypeId, beginSequenceId, endSequenceId, batchSize); + int impactRows = await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken); + + if (impactRows < batchSize) + { + return; + } + } + catch (SqlException sqlEx) + { + _logger.LogInformation(sqlEx, "Failed batch delete ResourceParams."); + + throw; + } + } + } + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceBulkImporter.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceBulkImporter.cs new file mode 100644 index 0000000000..2d1a2f127c --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceBulkImporter.cs @@ -0,0 +1,401 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator; +using Microsoft.Health.TaskManagement; +using Polly; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + internal class SqlResourceBulkImporter : IResourceBulkImporter + { + private List _generators = new List(); + private ISqlBulkCopyDataWrapperFactory _sqlBulkCopyDataWrapperFactory; + private ISqlImportOperation _sqlImportOperation; + private readonly ImportTaskConfiguration _importTaskConfiguration; + private IImportErrorSerializer _importErrorSerializer; + private ILogger _logger; + + public SqlResourceBulkImporter( + ISqlImportOperation sqlImportOperation, + ISqlBulkCopyDataWrapperFactory sqlBulkCopyDataWrapperFactory, + IImportErrorSerializer importErrorSerializer, + List generators, + IOptions operationsConfig, + ILogger logger) + { + EnsureArg.IsNotNull(sqlImportOperation, nameof(sqlImportOperation)); + EnsureArg.IsNotNull(sqlBulkCopyDataWrapperFactory, nameof(sqlBulkCopyDataWrapperFactory)); + EnsureArg.IsNotNull(importErrorSerializer, nameof(importErrorSerializer)); + EnsureArg.IsNotNull(generators, nameof(generators)); + EnsureArg.IsNotNull(operationsConfig, nameof(operationsConfig)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _sqlImportOperation = sqlImportOperation; + _sqlBulkCopyDataWrapperFactory = sqlBulkCopyDataWrapperFactory; + _importErrorSerializer = importErrorSerializer; + _generators = generators; + _importTaskConfiguration 
= operationsConfig.Value.Import; + _logger = logger; + } + + public SqlResourceBulkImporter( + ISqlImportOperation sqlImportOperation, + ISqlBulkCopyDataWrapperFactory sqlBulkCopyDataWrapperFactory, + IImportErrorSerializer importErrorSerializer, + CompartmentAssignmentTableBulkCopyDataGenerator compartmentAssignmentTableBulkCopyDataGenerator, + ResourceWriteClaimTableBulkCopyDataGenerator resourceWriteClaimTableBulkCopyDataGenerator, + DateTimeSearchParamsTableBulkCopyDataGenerator dateTimeSearchParamsTableBulkCopyDataGenerator, + NumberSearchParamsTableBulkCopyDataGenerator numberSearchParamsTableBulkCopyDataGenerator, + QuantitySearchParamsTableBulkCopyDataGenerator quantitySearchParamsTableBulkCopyDataGenerator, + ReferenceSearchParamsTableBulkCopyDataGenerator referenceSearchParamsTableBulkCopyDataGenerator, + ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator referenceTokenCompositeSearchParamsTableBulkCopyDataGenerator, + StringSearchParamsTableBulkCopyDataGenerator stringSearchParamsTableBulkCopyDataGenerator, + TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator tokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator, + TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator tokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator, + TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator tokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator, + TokenSearchParamsTableBulkCopyDataGenerator tokenSearchParamsTableBulkCopyDataGenerator, + TokenStringCompositeSearchParamsTableBulkCopyDataGenerator tokenStringCompositeSearchParamsTableBulkCopyDataGenerator, + TokenTextSearchParamsTableBulkCopyDataGenerator tokenTextSearchParamsTableBulkCopyDataGenerator, + TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator tokenTokenCompositeSearchParamsTableBulkCopyDataGenerator, + UriSearchParamsTableBulkCopyDataGenerator uriSearchParamsTableBulkCopyDataGenerator, + IOptions operationsConfig, + ILogger logger) + { + 
EnsureArg.IsNotNull(sqlImportOperation, nameof(sqlImportOperation)); + EnsureArg.IsNotNull(sqlBulkCopyDataWrapperFactory, nameof(sqlBulkCopyDataWrapperFactory)); + EnsureArg.IsNotNull(importErrorSerializer, nameof(importErrorSerializer)); + EnsureArg.IsNotNull(compartmentAssignmentTableBulkCopyDataGenerator, nameof(compartmentAssignmentTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(resourceWriteClaimTableBulkCopyDataGenerator, nameof(resourceWriteClaimTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(dateTimeSearchParamsTableBulkCopyDataGenerator, nameof(dateTimeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(numberSearchParamsTableBulkCopyDataGenerator, nameof(numberSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(quantitySearchParamsTableBulkCopyDataGenerator, nameof(quantitySearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(referenceSearchParamsTableBulkCopyDataGenerator, nameof(referenceSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(referenceTokenCompositeSearchParamsTableBulkCopyDataGenerator, nameof(referenceTokenCompositeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(stringSearchParamsTableBulkCopyDataGenerator, nameof(stringSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator, nameof(tokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator, nameof(tokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator, nameof(tokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenSearchParamsTableBulkCopyDataGenerator, nameof(tokenSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenStringCompositeSearchParamsTableBulkCopyDataGenerator, 
nameof(tokenStringCompositeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenTextSearchParamsTableBulkCopyDataGenerator, nameof(tokenTextSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(tokenTokenCompositeSearchParamsTableBulkCopyDataGenerator, nameof(tokenTokenCompositeSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(uriSearchParamsTableBulkCopyDataGenerator, nameof(uriSearchParamsTableBulkCopyDataGenerator)); + EnsureArg.IsNotNull(operationsConfig, nameof(operationsConfig)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _sqlImportOperation = sqlImportOperation; + _sqlBulkCopyDataWrapperFactory = sqlBulkCopyDataWrapperFactory; + _importErrorSerializer = importErrorSerializer; + + _generators.Add(compartmentAssignmentTableBulkCopyDataGenerator); + _generators.Add(resourceWriteClaimTableBulkCopyDataGenerator); + _generators.Add(dateTimeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(numberSearchParamsTableBulkCopyDataGenerator); + _generators.Add(quantitySearchParamsTableBulkCopyDataGenerator); + _generators.Add(referenceSearchParamsTableBulkCopyDataGenerator); + _generators.Add(referenceTokenCompositeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(stringSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenStringCompositeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenTextSearchParamsTableBulkCopyDataGenerator); + _generators.Add(tokenTokenCompositeSearchParamsTableBulkCopyDataGenerator); + _generators.Add(uriSearchParamsTableBulkCopyDataGenerator); + + _importTaskConfiguration = operationsConfig.Value.Import; + _logger = logger; + } + + public (Channel 
progressChannel, Task importTask) Import(Channel inputChannel, IImportErrorStore importErrorStore, CancellationToken cancellationToken) + { + Channel outputChannel = Channel.CreateUnbounded(); + + Task importTask = Task.Run( + async () => + { + await ImportInternalAsync(inputChannel, outputChannel, importErrorStore, cancellationToken); + }, + cancellationToken); + + return (outputChannel, importTask); + } + + public async Task CleanResourceAsync(ImportProcessingTaskInputData inputData, ImportProcessingProgress progress, CancellationToken cancellationToken) + { + if (!progress.NeedCleanData) + { + // Skip clean data step for first run. + return; + } + + long beginSequenceId = inputData.BeginSequenceId; + long endSequenceId = inputData.EndSequenceId; + long endIndex = progress.CurrentIndex; + + try + { + await _sqlBulkCopyDataWrapperFactory.EnsureInitializedAsync(); + await _sqlImportOperation.CleanBatchResourceAsync(inputData.ResourceType, beginSequenceId + endIndex, endSequenceId, cancellationToken); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Failed to clean batch resource."); + throw new RetriableTaskException("Failed to clean resource before import task start.", ex); + } + } + + private async Task ImportInternalAsync(Channel inputChannel, Channel outputChannel, IImportErrorStore importErrorStore, CancellationToken cancellationToken) + { + try + { + _logger.LogInformation("Start to import data to SQL data store."); + + Task checkpointTask = Task.FromResult(null); + + long succeedCount = 0; + long failedCount = 0; + long? 
lastCheckpointIndex = null; + long currentIndex = -1; + Dictionary resourceParamsBuffer = new Dictionary(); + List importErrorBuffer = new List(); + Queue> importTasks = new Queue>(); + + List resourceBuffer = new List(); + await _sqlBulkCopyDataWrapperFactory.EnsureInitializedAsync(); + await foreach (ImportResource resource in inputChannel.Reader.ReadAllAsync(cancellationToken)) + { + if (cancellationToken.IsCancellationRequested) + { + throw new OperationCanceledException(); + } + + lastCheckpointIndex = lastCheckpointIndex ?? resource.Index - 1; + currentIndex = resource.Index; + + resourceBuffer.Add(resource); + if (resourceBuffer.Count < _importTaskConfiguration.SqlBatchSizeForImportResourceOperation) + { + continue; + } + + try + { + // Handle resources in buffer + IEnumerable resourcesWithError = resourceBuffer.Where(r => r.ContainsError()); + IEnumerable inputResources = resourceBuffer.Where(r => !r.ContainsError()).Select(r => _sqlBulkCopyDataWrapperFactory.CreateSqlBulkCopyDataWrapper(r)); + IEnumerable mergedResources = await _sqlImportOperation.BulkMergeResourceAsync(inputResources, cancellationToken); + IEnumerable duplicateResourcesNotMerged = inputResources.Except(mergedResources); + + importErrorBuffer.AddRange(resourcesWithError.Select(r => r.ImportError)); + FillResourceParamsBuffer(mergedResources, resourceParamsBuffer); + AppendDuplicatedResouceErrorToBuffer(duplicateResourcesNotMerged, importErrorBuffer); + + succeedCount += mergedResources.Count(); + failedCount += resourcesWithError.Count() + duplicateResourcesNotMerged.Count(); + } + finally + { + foreach (ImportResource importResource in resourceBuffer) + { + importResource?.CompressedStream?.Dispose(); + } + + resourceBuffer.Clear(); + } + + bool shouldCreateCheckpoint = resource.Index - lastCheckpointIndex >= _importTaskConfiguration.SqlImportBatchSizeForCheckpoint; + if (shouldCreateCheckpoint) + { + // Create checkpoint for all tables not empty + string[] tableNameNeedImport = 
resourceParamsBuffer.Where(r => r.Value.Rows.Count > 0).Select(r => r.Key).ToArray(); + + foreach (string tableName in tableNameNeedImport) + { + DataTable dataTable = resourceParamsBuffer[tableName]; + resourceParamsBuffer.Remove(tableName); + await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel); + } + + // wait previous checkpoint task complete + await checkpointTask; + + // upload error logs for import errors + string[] importErrors = importErrorBuffer.ToArray(); + importErrorBuffer.Clear(); + lastCheckpointIndex = resource.Index; + checkpointTask = await EnqueueTaskAsync(importTasks, () => UploadImportErrorsAsync(importErrorStore, succeedCount, failedCount, importErrors, currentIndex, cancellationToken), outputChannel); + } + else + { + // import table >= MaxResourceCountInBatch + string[] tableNameNeedImport = + resourceParamsBuffer.Where(r => r.Value.Rows.Count >= _importTaskConfiguration.SqlBatchSizeForImportParamsOperation).Select(r => r.Key).ToArray(); + + foreach (string tableName in tableNameNeedImport) + { + DataTable dataTable = resourceParamsBuffer[tableName]; + resourceParamsBuffer.Remove(tableName); + await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel); + } + } + } + + try + { + // Handle resources in buffer + IEnumerable resourcesWithError = resourceBuffer.Where(r => r.ContainsError()); + IEnumerable inputResources = resourceBuffer.Where(r => !r.ContainsError()).Select(r => _sqlBulkCopyDataWrapperFactory.CreateSqlBulkCopyDataWrapper(r)); + IEnumerable mergedResources = await _sqlImportOperation.BulkMergeResourceAsync(inputResources, cancellationToken); + IEnumerable duplicateResourcesNotMerged = inputResources.Except(mergedResources); + importErrorBuffer.AddRange(resourcesWithError.Select(r => r.ImportError)); + + FillResourceParamsBuffer(mergedResources, resourceParamsBuffer); + + 
AppendDuplicatedResouceErrorToBuffer(duplicateResourcesNotMerged, importErrorBuffer); + succeedCount += mergedResources.Count(); + failedCount += resourcesWithError.Count() + duplicateResourcesNotMerged.Count(); + } + finally + { + foreach (ImportResource importResource in resourceBuffer) + { + importResource?.CompressedStream?.Dispose(); + } + + resourceBuffer.Clear(); + } + + // Import all remain tables + string[] allTablesNotNull = resourceParamsBuffer.Where(r => r.Value.Rows.Count > 0).Select(r => r.Key).ToArray(); + foreach (string tableName in allTablesNotNull) + { + DataTable dataTable = resourceParamsBuffer[tableName]; + await EnqueueTaskAsync(importTasks, () => ImportDataTableAsync(dataTable, cancellationToken), outputChannel); + } + + // Wait all table import task complete + while (importTasks.Count > 0) + { + await importTasks.Dequeue(); + } + + // Upload remain error logs + ImportProcessingProgress progress = await UploadImportErrorsAsync(importErrorStore, succeedCount, failedCount, importErrorBuffer.ToArray(), currentIndex, cancellationToken); + await outputChannel.Writer.WriteAsync(progress, cancellationToken); + } + finally + { + outputChannel.Writer.Complete(); + _logger.LogInformation("Import data to SQL data store complete."); + } + } + + private void FillResourceParamsBuffer(IEnumerable mergedResources, Dictionary resourceParamsBuffer) + { + foreach (SqlBulkCopyDataWrapper resourceWrapper in mergedResources) + { + foreach (TableBulkCopyDataGenerator generator in _generators) + { + if (!resourceParamsBuffer.ContainsKey(generator.TableName)) + { + resourceParamsBuffer[generator.TableName] = generator.GenerateDataTable(); + } + + generator.FillDataTable(resourceParamsBuffer[generator.TableName], resourceWrapper); + } + } + } + + private void AppendDuplicatedResouceErrorToBuffer(IEnumerable mergedResources, List importErrorBuffer) + { + foreach (SqlBulkCopyDataWrapper resourceWrapper in mergedResources) + { + 
importErrorBuffer.Add(_importErrorSerializer.Serialize(resourceWrapper.Index, string.Format(Resources.FailedToImportForDuplicatedResource, resourceWrapper.Resource.ResourceId, resourceWrapper.Index))); + } + } + + private async Task UploadImportErrorsAsync(IImportErrorStore importErrorStore, long succeedCount, long failedCount, string[] importErrors, long lastIndex, CancellationToken cancellationToken) + { + try + { + await importErrorStore.UploadErrorsAsync(importErrors, cancellationToken); + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Failed to upload error logs."); + throw; + } + + ImportProcessingProgress progress = new ImportProcessingProgress(); + progress.SucceedImportCount = succeedCount; + progress.FailedImportCount = failedCount; + progress.CurrentIndex = lastIndex + 1; + + // Return progress for checkpoint progress + return progress; + } + + private async Task ImportDataTableAsync(DataTable table, CancellationToken cancellationToken) + { + try + { + await Policy.Handle() + .WaitAndRetryAsync( + retryCount: 10, + sleepDurationProvider: (retryCount) => TimeSpan.FromSeconds(5 * (retryCount - 1))) + .ExecuteAsync(async () => + { + await _sqlImportOperation.BulkCopyDataAsync(table, cancellationToken); + }); + + // Return null for non checkpoint progress + return null; + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Failed to import table. 
{0}", table.TableName); + + throw; + } + } + + private async Task> EnqueueTaskAsync(Queue> importTasks, Func> newTaskFactory, Channel progressChannel) + { + while (importTasks.Count >= _importTaskConfiguration.SqlMaxImportOperationConcurrentCount) + { + ImportProcessingProgress progress = await importTasks.Dequeue(); + if (progress != null) + { + await progressChannel.Writer.WriteAsync(progress); + } + } + + Task newTask = newTaskFactory(); + importTasks.Enqueue(newTask); + + return newTask; + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceMetaPopulator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceMetaPopulator.cs new file mode 100644 index 0000000000..d1347de0af --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlResourceMetaPopulator.cs @@ -0,0 +1,24 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Storage; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + internal class SqlResourceMetaPopulator : IResourceMetaPopulator + { + public void Populate(long id, Resource resource) + { + if (resource.Meta == null) + { + resource.Meta = new Meta(); + } + + resource.Meta.LastUpdated = ResourceSurrogateIdHelper.ResourceSurrogateIdToLastUpdated(id); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlStoreSequenceIdGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlStoreSequenceIdGenerator.cs new file mode 100644 index 0000000000..4e7ec47713 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlStoreSequenceIdGenerator.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Storage; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import +{ + public class SqlStoreSequenceIdGenerator : ISequenceIdGenerator + { + /// + /// Get current surrogateId from datetime + /// + /// Current surrogated id. 
+ public long GetCurrentSequenceId() + { + return ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.UtcNow); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.diff.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.diff.sql index ef409be977..163a5aabfa 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.diff.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.diff.sql @@ -1,4 +1,4 @@ --- +-- -- STORED PROCEDURE -- ReindexResource -- diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.sql index c509b6a980..3572e1a5bc 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/16.sql @@ -3527,4 +3527,4 @@ GO COMMIT TRANSACTION -GO +GO \ No newline at end of file diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.diff.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.diff.sql new file mode 100644 index 0000000000..f6402f467c --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.diff.sql @@ -0,0 +1,447 @@ +/************************************************************* + Resource Bulk Import feature +**************************************************************/ + +IF TYPE_ID(N'BulkImportResourceType_1') IS NULL +BEGIN + CREATE TYPE dbo.BulkImportResourceType_1 AS TABLE + ( + ResourceTypeId smallint NOT NULL, + ResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version int NOT NULL, + IsHistory bit NOT NULL, + ResourceSurrogateId bigint NOT NULL, + IsDeleted bit NOT NULL, + RequestMethod varchar(10) NULL, + RawResource varbinary(max) NOT NULL, + IsRawResourceMetaSet bit NOT NULL DEFAULT 0, + SearchParamHash varchar(64) NULL + ) +END +GO + 
/*************************************************************
    Stored procedures for batch delete resources
**************************************************************/
--
-- STORED PROCEDURE
--     BatchDeleteResources
--
-- DESCRIPTION
--     Deletes up to @batchSize rows of the given resource type within the surrogate id range.
--
-- PARAMETERS
--     @resourceTypeId
--         * The resource type id
--     @startResourceSurrogateId
--         * The start ResourceSurrogateId (inclusive)
--     @endResourceSurrogateId
--         * The end ResourceSurrogateId (exclusive)
--     @batchSize
--         * Max batch size for delete operation
--
-- RETURNS
--     Number of rows deleted; a value below @batchSize means the range is exhausted.
CREATE OR ALTER PROCEDURE dbo.BatchDeleteResources
    @resourceTypeId smallint,
    @startResourceSurrogateId bigint,
    @endResourceSurrogateId bigint,
    @batchSize int
AS
    SET XACT_ABORT ON

    SET TRANSACTION ISOLATION LEVEL SERIALIZABLE
    BEGIN TRANSACTION

    DELETE Top(@batchSize) FROM dbo.Resource WITH (TABLOCK)
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId >= @startResourceSurrogateId AND ResourceSurrogateId < @endResourceSurrogateId

    -- Capture the count BEFORE committing: COMMIT TRANSACTION resets @@ROWCOUNT to 0,
    -- so "return @@rowcount" after the commit would always return 0.
    DECLARE @deletedRows int = @@ROWCOUNT

    COMMIT TRANSACTION

    RETURN @deletedRows
GO
@@rowcount +GO + + +/************************************************************* + Stored procedures for batch delete ResourceParams +**************************************************************/ +-- +-- STORED PROCEDURE +-- BatchDeleteResourceParams +-- +-- DESCRIPTION +-- Batch delete ResourceParams +-- +-- PARAMETERS +-- @tableName +-- * Resource params table name +-- @resourceTypeId +-- * Resource type id +-- @startResourceSurrogateId +-- * The start ResourceSurrogateId +-- @endResourceSurrogateId +-- * The end ResourceSurrogateId +-- @batchSize +-- * Max batch size for delete operation +CREATE OR ALTER PROCEDURE dbo.BatchDeleteResourceParams + @tableName nvarchar(128), + @resourceTypeId smallint, + @startResourceSurrogateId bigint, + @endResourceSurrogateId bigint, + @batchSize int +AS + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + BEGIN TRANSACTION + + DECLARE @Sql NVARCHAR(MAX); + DECLARE @ParmDefinition NVARCHAR(512); + + IF OBJECT_ID(@tableName) IS NOT NULL BEGIN + SET @sql = N'DELETE TOP(@BatchSizeParam) FROM ' + @tableName + N' WITH (TABLOCK) WHERE ResourceTypeId = @ResourceTypeIdParam AND ResourceSurrogateId >= @StartResourceSurrogateIdParam AND ResourceSurrogateId < @EndResourceSurrogateIdParam' + SET @parmDefinition = N'@BatchSizeParam int, @ResourceTypeIdParam smallint, @StartResourceSurrogateIdParam bigint, @EndResourceSurrogateIdParam bigint'; + + EXECUTE sp_executesql @sql, @parmDefinition, + @BatchSizeParam = @batchSize, + @ResourceTypeIdParam = @resourceTypeId, + @StartResourceSurrogateIdParam = @startResourceSurrogateId, + @EndResourceSurrogateIdParam = @endResourceSurrogateId + END + + COMMIT TRANSACTION + + return @@rowcount +GO + +/************************************************************* + Stored procedures for disable index +**************************************************************/ +-- +-- STORED PROCEDURE +-- DisableIndex +-- +-- DESCRIPTION +-- Stored procedures for disable index +-- +-- PARAMETERS 
+-- @tableName +-- * index table name +-- @indexName +-- * index name +CREATE OR ALTER PROCEDURE [dbo].[DisableIndex] + @tableName nvarchar(128), + @indexName nvarchar(128) +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + + DECLARE @IsExecuted INT + SET @IsExecuted = 0 + + BEGIN TRANSACTION + + IF EXISTS + ( + SELECT * + FROM [sys].[indexes] + WHERE name = @indexName + AND object_id = OBJECT_ID(@tableName) + AND is_disabled = 0 + ) + BEGIN + DECLARE @Sql NVARCHAR(MAX); + + SET @Sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + + N' on ' + @tableName + ' Disable' + + EXECUTE sp_executesql @Sql + + SET @IsExecuted = 1 + END + + COMMIT TRANSACTION + + RETURN @IsExecuted +GO + +/************************************************************* + Stored procedures for rebuild index +**************************************************************/ +-- +-- STORED PROCEDURE +-- RebuildIndex +-- +-- DESCRIPTION +-- Stored procedures for rebuild index +-- +-- PARAMETERS +-- @tableName +-- * index table name +-- @indexName +-- * index name +CREATE OR ALTER PROCEDURE [dbo].[RebuildIndex] + @tableName nvarchar(128), + @indexName nvarchar(128) +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + + DECLARE @IsExecuted INT + SET @IsExecuted = 0 + + BEGIN TRANSACTION + + IF EXISTS + ( + SELECT * + FROM [sys].[indexes] + WHERE name = @indexName + AND object_id = OBJECT_ID(@tableName) + AND is_disabled = 1 + ) + BEGIN + DECLARE @Sql NVARCHAR(MAX); + + SET @Sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + + N' on ' + @tableName + ' Rebuild' + + EXECUTE sp_executesql @Sql + + SET @IsExecuted = 1 + END + + COMMIT TRANSACTION + + RETURN @IsExecuted +GO + +/************************************************************* + Stored procedures for bulk merge resources +**************************************************************/ +-- +-- STORED PROCEDURE +-- BulkMergeResource +-- +-- DESCRIPTION +-- Stored procedures 
for bulk merge resource +-- +-- PARAMETERS +-- @resources +-- * input resources +CREATE OR ALTER PROCEDURE dbo.BulkMergeResource + @resources dbo.BulkImportResourceType_1 READONLY +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + BEGIN TRANSACTION + + MERGE INTO [dbo].[Resource] WITH (ROWLOCK, INDEX(IX_Resource_ResourceTypeId_ResourceId_Version)) AS target + USING @resources AS source + ON source.[ResourceTypeId] = target.[ResourceTypeId] + AND source.[ResourceId] = target.[ResourceId] + AND source.[Version] = target.[Version] + WHEN NOT MATCHED BY target THEN + INSERT ([ResourceTypeId] + , [ResourceId] + , [Version] + , [IsHistory] + , [ResourceSurrogateId] + , [IsDeleted] + , [RequestMethod] + , [RawResource] + , [IsRawResourceMetaSet] + , [SearchParamHash]) + VALUES ([ResourceTypeId] + , [ResourceId] + , [Version] + , [IsHistory] + , [ResourceSurrogateId] + , [IsDeleted] + , [RequestMethod] + , [RawResource] + , [IsRawResourceMetaSet] + , [SearchParamHash]) + OUTPUT Inserted.[ResourceSurrogateId]; + + COMMIT TRANSACTION +GO + +/************************************************************* + Stored procedures for general task +**************************************************************/ +-- +-- STORED PROCEDURE +-- CreateTask_2 +-- +-- DESCRIPTION +-- Create task for given task payload. 
+-- +-- PARAMETERS +-- @taskId +-- * The ID of the task record to create +-- @queueId +-- * The ID of the queue the task belongs to +-- @taskTypeId +-- * The type id of the task +-- @maxRetryCount +-- * The maximum number for retry operation +-- @inputData +-- * Input data payload for the task +-- @isUniqueTaskByType +-- * Only create task if there's no other active task with same task type id +-- +CREATE OR ALTER PROCEDURE [dbo].[CreateTask_2] + @taskId varchar(64), + @queueId varchar(64), + @taskTypeId smallint, + @maxRetryCount smallint = 3, + @inputData varchar(max), + @isUniqueTaskByType bit +AS + SET NOCOUNT ON + + SET XACT_ABORT ON + BEGIN TRANSACTION + + DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME() + DECLARE @status smallint = 1 + DECLARE @retryCount smallint = 0 + DECLARE @isCanceled bit = 0 + + -- Check if the task has already been created + IF (@isUniqueTaskByType = 1) BEGIN + IF EXISTS + ( + SELECT * + FROM [dbo].[TaskInfo] + WHERE TaskId = @taskId or (TaskTypeId = @taskTypeId and Status <> 3) + ) + BEGIN + THROW 50409, 'Task already existed', 1; + END + END + ELSE BEGIN + IF EXISTS + ( + SELECT * + FROM [dbo].[TaskInfo] + WHERE TaskId = @taskId + ) + BEGIN + THROW 50409, 'Task already existed', 1; + END + END + + -- Create new task + INSERT INTO [dbo].[TaskInfo] + (TaskId, QueueId, Status, TaskTypeId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData) + VALUES + (@taskId, @queueId, @status, @taskTypeId, @isCanceled, @retryCount, @maxRetryCount, @heartbeatDateTime, @inputData) + + SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData + FROM [dbo].[TaskInfo] + where TaskId = @taskId + + COMMIT TRANSACTION +GO + +/************************************************************* + Stored procedures for get next available task +**************************************************************/ +-- +-- 
STORED PROCEDURE +-- GetNextTask_2 +-- +-- DESCRIPTION +-- Get next available tasks +-- +-- PARAMETERS +-- @queueId +-- * The ID of the queue to pull tasks from +-- @count +-- * Batch count for tasks list +-- @taskHeartbeatTimeoutThresholdInSeconds +-- * Timeout threshold in seconds for the task heartbeat keep-alive +CREATE OR ALTER PROCEDURE [dbo].[GetNextTask_2] + @queueId varchar(64), + @count smallint, + @taskHeartbeatTimeoutThresholdInSeconds int = 600 +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + BEGIN TRANSACTION + + -- We will consider a job to be stale if its timestamp is smaller than or equal to this. + DECLARE @expirationDateTime dateTime2(7) + SELECT @expirationDateTime = DATEADD(second, -@taskHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME()) + + DECLARE @availableJobs TABLE ( + TaskId varchar(64), + QueueId varchar(64), + Status smallint, + TaskTypeId smallint, + IsCanceled bit, + RetryCount smallint, + HeartbeatDateTime datetime2, + InputData varchar(max), + TaskContext varchar(max), + Result varchar(max) + ) + + INSERT INTO @availableJobs + SELECT TOP(@count) TaskId, QueueId, Status, TaskTypeId, IsCanceled, RetryCount, HeartbeatDateTime, InputData, TaskContext, Result + FROM dbo.TaskInfo + WHERE (QueueId = @queueId AND (Status = 1 OR (Status = 2 AND HeartbeatDateTime <= @expirationDateTime))) + ORDER BY HeartbeatDateTime + + DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME() + + UPDATE dbo.TaskInfo + SET Status = 2, HeartbeatDateTime = @heartbeatDateTime, RunId = CAST(NEWID() AS NVARCHAR(50)) + FROM dbo.TaskInfo task INNER JOIN @availableJobs availableJob ON task.TaskId = availableJob.TaskId + + Select task.TaskId, task.QueueId, task.Status, task.TaskTypeId, task.RunId, task.IsCanceled, task.RetryCount, task.MaxRetryCount, task.HeartbeatDateTime, task.InputData, task.TaskContext, task.Result + from dbo.TaskInfo task INNER JOIN @availableJobs availableJob ON task.TaskId = availableJob.TaskId + + COMMIT TRANSACTION +GO 
diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.sql new file mode 100644 index 0000000000..dfb0a6ef22 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/17.sql @@ -0,0 +1,3853 @@ +-- Style guide: please see: https://github.com/ktaranov/sqlserver-kit/blob/master/SQL%20Server%20Name%20Convention%20and%20T-SQL%20Programming%20Style.md + +-- The wrapping up of the initialization script under a transaction is removed from the schema-manager tool, so adding transaction in the script itself. + +SET XACT_ABORT ON + +BEGIN TRANSACTION + +/************************************************************* + Schema Version +**************************************************************/ + +INSERT INTO dbo.SchemaVersion +VALUES + (17, 'started') + +GO + +/************************************************************* + Migration progress +**************************************************************/ + +CREATE TABLE dbo.SchemaMigrationProgress +( + Timestamp datetime2(3) default CURRENT_TIMESTAMP, + Message nvarchar(max) +) + +GO + +CREATE PROCEDURE dbo.LogSchemaMigrationProgress + @message varchar(max) +AS + INSERT INTO dbo.SchemaMigrationProgress (Message) VALUES (@message) +GO + +/************************************************************* + Partitioning function and scheme +**************************************************************/ + +CREATE PARTITION FUNCTION PartitionFunction_ResourceTypeId (smallint) +AS RANGE RIGHT FOR VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 
105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150); + +CREATE PARTITION SCHEME PartitionScheme_ResourceTypeId +AS PARTITION PartitionFunction_ResourceTypeId ALL TO ([PRIMARY]); + +/************************************************************* + Model tables +**************************************************************/ + +CREATE TABLE dbo.SearchParam +( + SearchParamId smallint IDENTITY(1,1) NOT NULL, + Uri varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status varchar(10) NULL, + LastUpdated datetimeoffset(7) NULL, + IsPartiallySupported bit NULL +) + +CREATE UNIQUE CLUSTERED INDEX IXC_SearchParam ON dbo.SearchParam +( + Uri +) + +CREATE TABLE dbo.ResourceType +( + ResourceTypeId smallint IDENTITY(1,1) NOT NULL, + Name nvarchar(50) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE UNIQUE CLUSTERED INDEX IXC_ResourceType on dbo.ResourceType +( + Name +) + +-- Create System and QuantityCode tables + +CREATE TABLE dbo.System +( + SystemId int IDENTITY(1,1) NOT NULL, + Value nvarchar(256) NOT NULL, +) + +CREATE UNIQUE CLUSTERED INDEX IXC_System ON dbo.System +( + Value +) + +CREATE TABLE dbo.QuantityCode +( + QuantityCodeId int IDENTITY(1,1) NOT NULL, + Value nvarchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE UNIQUE CLUSTERED INDEX IXC_QuantityCode on dbo.QuantityCode +( + Value +) + +/************************************************************* + Resource table +**************************************************************/ + +CREATE TABLE dbo.Resource +( + ResourceTypeId smallint NOT NULL, + ResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version int NOT NULL, + IsHistory bit NOT NULL, + ResourceSurrogateId bigint NOT NULL, + IsDeleted bit NOT NULL, + RequestMethod varchar(10) NULL, + RawResource varbinary(max) NOT NULL, + 
IsRawResourceMetaSet bit NOT NULL DEFAULT 0, + SearchParamHash varchar(64) NULL +) + +ALTER TABLE dbo.Resource SET ( LOCK_ESCALATION = AUTO ) + +CREATE UNIQUE CLUSTERED INDEX IXC_Resource ON dbo.Resource +( + ResourceTypeId, + ResourceSurrogateId +) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId_Version ON dbo.Resource +( + ResourceTypeId, + ResourceId, + Version +) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId ON dbo.Resource +( + ResourceTypeId, + ResourceId +) +INCLUDE -- We want the query in UpsertResource, which is done with UPDLOCK AND HOLDLOCK, to not require a key lookup +( + Version, + IsDeleted +) +WHERE IsHistory = 0 +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceSurrgateId ON dbo.Resource +( + ResourceTypeId, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND IsDeleted = 0 +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_Resource_ResourceSurrogateId ON dbo.Resource +( + ResourceSurrogateId +) +ON [Primary] + +/************************************************************* + Capture claims on write +**************************************************************/ + +CREATE TABLE dbo.ClaimType +( + ClaimTypeId tinyint IDENTITY(1,1) NOT NULL, + Name varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE UNIQUE CLUSTERED INDEX IXC_Claim on dbo.ClaimType +( + Name +) + +CREATE TYPE dbo.BulkResourceWriteClaimTableType_1 AS TABLE +( + Offset int NOT NULL, + ClaimTypeId tinyint NOT NULL, + ClaimValue nvarchar(128) NOT NULL +) + +CREATE TABLE dbo.ResourceWriteClaim +( + ResourceSurrogateId bigint NOT NULL, + ClaimTypeId tinyint NOT NULL, + ClaimValue nvarchar(128) NOT NULL, +) WITH (DATA_COMPRESSION = PAGE) + +CREATE CLUSTERED INDEX IXC_ResourceWriteClaim on dbo.ResourceWriteClaim +( + 
ResourceSurrogateId, + ClaimTypeId +) + +/************************************************************* + Compartments +**************************************************************/ + +CREATE TABLE dbo.CompartmentType +( + CompartmentTypeId tinyint IDENTITY(1,1) NOT NULL, + Name varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE UNIQUE CLUSTERED INDEX IXC_CompartmentType on dbo.CompartmentType +( + Name +) + +CREATE TYPE dbo.BulkCompartmentAssignmentTableType_1 AS TABLE +( + Offset int NOT NULL, + CompartmentTypeId tinyint NOT NULL, + ReferenceResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE TABLE dbo.CompartmentAssignment +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + CompartmentTypeId tinyint NOT NULL, + ReferenceResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory bit NOT NULL, +) + +ALTER TABLE dbo.CompartmentAssignment SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_CompartmentAssignment +ON dbo.CompartmentAssignment +( + ResourceTypeId, + ResourceSurrogateId, + CompartmentTypeId, + ReferenceResourceId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_CompartmentAssignment_CompartmentTypeId_ReferenceResourceId +ON dbo.CompartmentAssignment +( + ResourceTypeId, + CompartmentTypeId, + ReferenceResourceId, + ResourceSurrogateId +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Reference Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkReferenceSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + BaseUri varchar(128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId smallint NULL, + ReferenceResourceId varchar(64) COLLATE 
Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion int NULL +) + +CREATE TABLE dbo.ReferenceSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + BaseUri varchar(128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId smallint NULL, + ReferenceResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion int NULL, + IsHistory bit NOT NULL, +) + +ALTER TABLE dbo.ReferenceSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_ReferenceSearchParam +ON dbo.ReferenceSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_ReferenceSearchParam_SearchParamId_ReferenceResourceTypeId_ReferenceResourceId_BaseUri_ReferenceResourceVersion +ON dbo.ReferenceSearchParam +( + ResourceTypeId, + SearchParamId, + ReferenceResourceId, + ReferenceResourceTypeId, + BaseUri, + ResourceSurrogateId +) +INCLUDE +( + ReferenceResourceVersion +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Token Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkTokenSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + SystemId int NULL, + Code varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE TABLE dbo.TokenSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + SystemId int NULL, + Code varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory bit NOT NULL, +) + +ALTER TABLE dbo.TokenSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_TokenSearchParam +ON dbo.TokenSearchParam +( + 
ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenSeachParam_SearchParamId_Code_SystemId +ON dbo.TokenSearchParam +( + ResourceTypeId, + SearchParamId, + Code, + ResourceSurrogateId +) +INCLUDE +( + SystemId +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Token Text +**************************************************************/ + +CREATE TYPE dbo.BulkTokenTextTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + Text nvarchar(400) COLLATE Latin1_General_CI_AI NOT NULL +) + +CREATE TABLE dbo.TokenText +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + Text nvarchar(400) COLLATE Latin1_General_CI_AI NOT NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.TokenText SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_TokenText +ON dbo.TokenText +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenText_SearchParamId_Text +ON dbo.TokenText +( + ResourceTypeId, + SearchParamId, + Text, + ResourceSurrogateId +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + String Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkStringSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + Text nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow nvarchar(max) COLLATE Latin1_General_100_CI_AI_SC NULL +) + +CREATE TABLE dbo.StringSearchParam +( + ResourceTypeId 
smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + Text nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow nvarchar(max) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.StringSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_StringSearchParam +ON dbo.StringSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_StringSearchParam_SearchParamId_Text +ON dbo.StringSearchParam +( + ResourceTypeId, + SearchParamId, + Text, + ResourceSurrogateId +) +INCLUDE +( + TextOverflow -- will not be needed when all servers are targeting at least this version. +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_StringSearchParam_SearchParamId_TextWithOverflow +ON dbo.StringSearchParam +( + ResourceTypeId, + SearchParamId, + Text, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND TextOverflow IS NOT NULL +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + URI Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkUriSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + Uri varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE TABLE dbo.UriSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + Uri varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.UriSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_UriSearchParam +ON dbo.UriSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + 
SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_UriSearchParam_SearchParamId_Uri +ON dbo.UriSearchParam +( + ResourceTypeId, + SearchParamId, + Uri, + ResourceSurrogateId +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + + +/************************************************************* + Number Search Param +**************************************************************/ + +-- We support the underlying value being a range, though we expect the vast majority of entries to be a single value. +-- Either: +-- (1) SingleValue is not null and LowValue and HighValue are both null, or +-- (2) SingleValue is null and LowValue and HighValue are both not null +-- We make use of filtered nonclustered indexes to keep queries over the ranges limited to those rows that actually have ranges + +CREATE TYPE dbo.BulkNumberSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + SingleValue decimal(18,6) NULL, + LowValue decimal(18,6) NULL, + HighValue decimal(18,6) NULL +) + +CREATE TABLE dbo.NumberSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + SingleValue decimal(18,6) NULL, + LowValue decimal(18,6) SPARSE NULL, + HighValue decimal(18,6) SPARSE NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.NumberSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_NumberSearchParam +ON dbo.NumberSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_NumberSearchParam_SearchParamId_SingleValue +ON dbo.NumberSearchParam +( + ResourceTypeId, + SearchParamId, + SingleValue, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND SingleValue IS NOT NULL +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE 
NONCLUSTERED INDEX IX_NumberSearchParam_SearchParamId_LowValue_HighValue +ON dbo.NumberSearchParam +( + ResourceTypeId, + SearchParamId, + LowValue, + HighValue, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND LowValue IS NOT NULL +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_NumberSearchParam_SearchParamId_HighValue_LowValue +ON dbo.NumberSearchParam +( + ResourceTypeId, + SearchParamId, + HighValue, + LowValue, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND LowValue IS NOT NULL +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Quantity Search Param +**************************************************************/ + +-- See comment above for number search params for how we store ranges + +CREATE TYPE dbo.BulkQuantitySearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + SystemId int NULL, + QuantityCodeId int NULL, + SingleValue decimal(18,6) NULL, + LowValue decimal(18,6) NULL, + HighValue decimal(18,6) NULL +) + +CREATE TABLE dbo.QuantitySearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + SystemId int NULL, + QuantityCodeId int NULL, + SingleValue decimal(18,6) NULL, + LowValue decimal(18,6) SPARSE NULL, + HighValue decimal(18,6) SPARSE NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.QuantitySearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_QuantitySearchParam +ON dbo.QuantitySearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_QuantitySearchParam_SearchParamId_QuantityCodeId_SingleValue +ON dbo.QuantitySearchParam +( + ResourceTypeId, + SearchParamId, + QuantityCodeId, + SingleValue, + ResourceSurrogateId +) +INCLUDE +( + SystemId +) +WHERE IsHistory = 0 AND SingleValue IS NOT NULL +ON 
PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_QuantitySearchParam_SearchParamId_QuantityCodeId_LowValue_HighValue +ON dbo.QuantitySearchParam +( + ResourceTypeId, + SearchParamId, + QuantityCodeId, + LowValue, + HighValue, + ResourceSurrogateId +) +INCLUDE +( + SystemId +) +WHERE IsHistory = 0 AND LowValue IS NOT NULL +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_QuantitySearchParam_SearchParamId_QuantityCodeId_HighValue_LowValue +ON dbo.QuantitySearchParam +( + ResourceTypeId, + SearchParamId, + QuantityCodeId, + HighValue, + LowValue, + ResourceSurrogateId +) +INCLUDE +( + SystemId +) +WHERE IsHistory = 0 AND LowValue IS NOT NULL +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Date Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkDateTimeSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + StartDateTime datetimeoffset(7) NOT NULL, + EndDateTime datetimeoffset(7) NOT NULL, + IsLongerThanADay bit NOT NULL +) + +CREATE TABLE dbo.DateTimeSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + StartDateTime datetime2(7) NOT NULL, + EndDateTime datetime2(7) NOT NULL, + IsLongerThanADay bit NOT NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.DateTimeSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_DateTimeSearchParam +ON dbo.DateTimeSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_DateTimeSearchParam_SearchParamId_StartDateTime_EndDateTime +ON dbo.DateTimeSearchParam +( + ResourceTypeId, + SearchParamId, + StartDateTime, + EndDateTime, + ResourceSurrogateId +) +INCLUDE +( + IsLongerThanADay +) +WHERE IsHistory = 0 +ON 
PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_DateTimeSearchParam_SearchParamId_EndDateTime_StartDateTime +ON dbo.DateTimeSearchParam +( + ResourceTypeId, + SearchParamId, + EndDateTime, + StartDateTime, + ResourceSurrogateId +) +INCLUDE +( + IsLongerThanADay +) +WHERE IsHistory = 0 +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_DateTimeSearchParam_SearchParamId_StartDateTime_EndDateTime_Long +ON dbo.DateTimeSearchParam +( + ResourceTypeId, + SearchParamId, + StartDateTime, + EndDateTime, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND IsLongerThanADay = 1 +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_DateTimeSearchParam_SearchParamId_EndDateTime_StartDateTime_Long +ON dbo.DateTimeSearchParam +( + ResourceTypeId, + SearchParamId, + EndDateTime, + StartDateTime, + ResourceSurrogateId +) +WHERE IsHistory = 0 AND IsLongerThanADay = 1 +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Reference$Token Composite Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkReferenceTokenCompositeSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + BaseUri1 varchar(128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 smallint NULL, + ReferenceResourceId1 varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 int NULL, + SystemId2 int NULL, + Code2 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE TABLE dbo.ReferenceTokenCompositeSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + BaseUri1 varchar(128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 smallint NULL, + ReferenceResourceId1 varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + 
ReferenceResourceVersion1 int NULL, + SystemId2 int NULL, + Code2 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory bit NOT NULL, +) + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_ReferenceTokenCompositeSearchParam +ON dbo.ReferenceTokenCompositeSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_ReferenceTokenCompositeSearchParam_ReferenceResourceId1_Code2 +ON dbo.ReferenceTokenCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + ReferenceResourceId1, + Code2, + ResourceSurrogateId +) +INCLUDE +( + ReferenceResourceTypeId1, + BaseUri1, + SystemId2 +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Token$Token Composite Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkTokenTokenCompositeSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + SystemId1 int NULL, + Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 int NULL, + Code2 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL +) + +CREATE TABLE dbo.TokenTokenCompositeSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + SystemId1 int NULL, + Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 int NULL, + Code2 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory bit NOT NULL +) + +ALTER TABLE dbo.TokenTokenCompositeSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_TokenTokenCompositeSearchParam +ON dbo.TokenTokenCompositeSearchParam +( + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) 
+ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenTokenCompositeSearchParam_Code1_Code2 +ON dbo.TokenTokenCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + Code2, + ResourceSurrogateId +) +INCLUDE +( + SystemId1, + SystemId2 +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Token$DateTime Composite Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkTokenDateTimeCompositeSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + SystemId1 int NULL, + Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + StartDateTime2 datetimeoffset(7) NOT NULL, + EndDateTime2 datetimeoffset(7) NOT NULL, + IsLongerThanADay2 bit NOT NULL +) + +CREATE TABLE dbo.TokenDateTimeCompositeSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + SystemId1 int NULL, + Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + StartDateTime2 datetime2(7) NOT NULL, + EndDateTime2 datetime2(7) NOT NULL, + IsLongerThanADay2 bit NOT NULL, + IsHistory bit NOT NULL, +) + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_TokenDateTimeCompositeSearchParam +ON dbo.TokenDateTimeCompositeSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenDateTimeCompositeSearchParam_Code1_StartDateTime2_EndDateTime2 +ON dbo.TokenDateTimeCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + StartDateTime2, + EndDateTime2, + ResourceSurrogateId +) +INCLUDE +( + SystemId1, + IsLongerThanADay2 +) + +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON 
PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenDateTimeCompositeSearchParam_Code1_EndDateTime2_StartDateTime2 +ON dbo.TokenDateTimeCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + EndDateTime2, + StartDateTime2, + ResourceSurrogateId +) +INCLUDE +( + SystemId1, + IsLongerThanADay2 +) +WHERE IsHistory = 0 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenDateTimeCompositeSearchParam_Code1_StartDateTime2_EndDateTime2_Long +ON dbo.TokenDateTimeCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + StartDateTime2, + EndDateTime2, + ResourceSurrogateId +) +INCLUDE +( + SystemId1 +) + +WHERE IsHistory = 0 AND IsLongerThanADay2 = 1 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenDateTimeCompositeSearchParam_Code1_EndDateTime2_StartDateTime2_Long +ON dbo.TokenDateTimeCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + EndDateTime2, + StartDateTime2, + ResourceSurrogateId +) +INCLUDE +( + SystemId1 +) +WHERE IsHistory = 0 AND IsLongerThanADay2 = 1 +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + +/************************************************************* + Token$Quantity Composite Search Param +**************************************************************/ + +CREATE TYPE dbo.BulkTokenQuantityCompositeSearchParamTableType_1 AS TABLE +( + Offset int NOT NULL, + SearchParamId smallint NOT NULL, + SystemId1 int NULL, + Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 int NULL, + QuantityCodeId2 int NULL, + SingleValue2 decimal(18,6) NULL, + LowValue2 decimal(18,6) NULL, + HighValue2 decimal(18,6) NULL +) + +CREATE TABLE dbo.TokenQuantityCompositeSearchParam +( + ResourceTypeId smallint NOT NULL, + ResourceSurrogateId bigint NOT NULL, + SearchParamId smallint NOT NULL, + 
SystemId1 int NULL, + Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 int NULL, + QuantityCodeId2 int NULL, + SingleValue2 decimal(18,6) NULL, + LowValue2 decimal(18,6) NULL, + HighValue2 decimal(18,6) NULL, + IsHistory bit NOT NULL, +) + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam SET ( LOCK_ESCALATION = AUTO ) + +CREATE CLUSTERED INDEX IXC_TokenQuantityCompositeSearchParam +ON dbo.TokenQuantityCompositeSearchParam +( + ResourceTypeId, + ResourceSurrogateId, + SearchParamId +) +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenQuantityCompositeSearchParam_SearchParamId_Code1_QuantityCodeId2_SingleValue2 +ON dbo.TokenQuantityCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + SingleValue2, + ResourceSurrogateId +) +INCLUDE +( + QuantityCodeId2, + SystemId1, + SystemId2 +) +WHERE IsHistory = 0 AND SingleValue2 IS NOT NULL +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenQuantityCompositeSearchParam_SearchParamId_Code1_QuantityCodeId2_LowValue2_HighValue2 +ON dbo.TokenQuantityCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + LowValue2, + HighValue2, + ResourceSurrogateId +) +INCLUDE +( + QuantityCodeId2, + SystemId1, + SystemId2 +) +WHERE IsHistory = 0 AND LowValue2 IS NOT NULL +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +CREATE NONCLUSTERED INDEX IX_TokenQuantityCompositeSearchParam_SearchParamId_Code1_QuantityCodeId2_HighValue2_LowValue2 +ON dbo.TokenQuantityCompositeSearchParam +( + ResourceTypeId, + SearchParamId, + Code1, + HighValue2, + LowValue2, + ResourceSurrogateId +) +INCLUDE +( + QuantityCodeId2, + SystemId1, + SystemId2 +) +WHERE IsHistory = 0 AND LowValue2 IS NOT NULL +WITH (DATA_COMPRESSION = PAGE) +ON PartitionScheme_ResourceTypeId(ResourceTypeId) + +GO + 
/*************************************************************
    Token$String Composite Search Param
**************************************************************/

CREATE TYPE dbo.BulkTokenStringCompositeSearchParamTableType_1 AS TABLE
(
    Offset int NOT NULL,
    SearchParamId smallint NOT NULL,
    SystemId1 int NULL,
    Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL,
    Text2 nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL,
    TextOverflow2 nvarchar(max) COLLATE Latin1_General_100_CI_AI_SC NULL
)

-- NOTE(review): the table stores Text2/TextOverflow2 with COLLATE
-- Latin1_General_CI_AI while the TVP above uses Latin1_General_100_CI_AI_SC —
-- confirm the collation mismatch is intentional.
CREATE TABLE dbo.TokenStringCompositeSearchParam
(
    ResourceTypeId smallint NOT NULL,
    ResourceSurrogateId bigint NOT NULL,
    SearchParamId smallint NOT NULL,
    SystemId1 int NULL,
    Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL,
    Text2 nvarchar(256) COLLATE Latin1_General_CI_AI NOT NULL,
    TextOverflow2 nvarchar(max) COLLATE Latin1_General_CI_AI NULL,
    IsHistory bit NOT NULL
)

ALTER TABLE dbo.TokenStringCompositeSearchParam SET ( LOCK_ESCALATION = AUTO )

CREATE CLUSTERED INDEX IXC_TokenStringCompositeSearchParam
ON dbo.TokenStringCompositeSearchParam
(
    ResourceSurrogateId,
    SearchParamId
)
WITH (DATA_COMPRESSION = PAGE)
ON PartitionScheme_ResourceTypeId(ResourceTypeId)

CREATE NONCLUSTERED INDEX IX_TokenStringCompositeSearchParam_SearchParamId_Code1_Text2
ON dbo.TokenStringCompositeSearchParam
(
    ResourceTypeId,
    SearchParamId,
    Code1,
    Text2,
    ResourceSurrogateId
)
INCLUDE
(
    SystemId1,
    TextOverflow2 -- will not be needed when all servers are targeting at least this version.
)
WHERE IsHistory = 0
WITH (DATA_COMPRESSION = PAGE)
ON PartitionScheme_ResourceTypeId(ResourceTypeId)

CREATE NONCLUSTERED INDEX IX_TokenStringCompositeSearchParam_SearchParamId_Code1_Text2WithOverflow
ON dbo.TokenStringCompositeSearchParam
(
    ResourceTypeId,
    SearchParamId,
    Code1,
    Text2,
    ResourceSurrogateId
)
INCLUDE
(
    SystemId1
)
WHERE IsHistory = 0 AND TextOverflow2 IS NOT NULL
WITH (DATA_COMPRESSION = PAGE)
ON PartitionScheme_ResourceTypeId(ResourceTypeId)

GO


/*************************************************************
    Token$Number$Number Composite Search Param
**************************************************************/

-- See the number search param for how we deal with null. We apply a similar
-- pattern here, except that we pass in a HasRange bit through the TVP. The
-- alternative would have been a computed column, but a computed column cannot
-- be used as an index filter (even if it is a persisted computed column).

CREATE TYPE dbo.BulkTokenNumberNumberCompositeSearchParamTableType_1 AS TABLE
(
    Offset int NOT NULL,
    SearchParamId smallint NOT NULL,
    SystemId1 int NULL,
    Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL,
    SingleValue2 decimal(18,6) NULL,
    LowValue2 decimal(18,6) NULL,
    HighValue2 decimal(18,6) NULL,
    SingleValue3 decimal(18,6) NULL,
    LowValue3 decimal(18,6) NULL,
    HighValue3 decimal(18,6) NULL,
    HasRange bit NOT NULL
)

CREATE TABLE dbo.TokenNumberNumberCompositeSearchParam
(
    ResourceTypeId smallint NOT NULL,
    ResourceSurrogateId bigint NOT NULL,
    SearchParamId smallint NOT NULL,
    SystemId1 int NULL,
    Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL,
    SingleValue2 decimal(18,6) NULL,
    LowValue2 decimal(18,6) NULL,
    HighValue2 decimal(18,6) NULL,
    SingleValue3 decimal(18,6) NULL,
    LowValue3 decimal(18,6) NULL,
    HighValue3 decimal(18,6) NULL,
    HasRange bit NOT NULL,
    IsHistory bit NOT NULL
)

ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam SET ( LOCK_ESCALATION = AUTO )

CREATE CLUSTERED INDEX IXC_TokenNumberNumberCompositeSearchParam
ON dbo.TokenNumberNumberCompositeSearchParam
(
    ResourceTypeId,
    ResourceSurrogateId,
    SearchParamId
)
WITH (DATA_COMPRESSION = PAGE)
ON PartitionScheme_ResourceTypeId(ResourceTypeId)

CREATE NONCLUSTERED INDEX IX_TokenNumberNumberCompositeSearchParam_SearchParamId_Code1_Text2
ON dbo.TokenNumberNumberCompositeSearchParam
(
    ResourceTypeId,
    SearchParamId,
    Code1,
    SingleValue2,
    SingleValue3,
    ResourceSurrogateId
)
INCLUDE
(
    SystemId1
)
WHERE IsHistory = 0 AND HasRange = 0
WITH (DATA_COMPRESSION = PAGE)
ON PartitionScheme_ResourceTypeId(ResourceTypeId)

CREATE NONCLUSTERED INDEX IX_TokenNumberNumberCompositeSearchParam_SearchParamId_Code1_LowValue2_HighValue2_LowValue3_HighValue3
ON dbo.TokenNumberNumberCompositeSearchParam
(
    ResourceTypeId,
    SearchParamId,
    Code1,
    LowValue2,
    HighValue2,
    LowValue3,
    HighValue3,
    ResourceSurrogateId
)
INCLUDE
(
    SystemId1
)
WHERE IsHistory = 0 AND HasRange = 1
WITH (DATA_COMPRESSION = PAGE)
ON PartitionScheme_ResourceTypeId(ResourceTypeId)

GO

/*************************************************************
    Sequence for generating unique 12.5ns "tick" components that are added
    to a base ID based on the timestamp to form a unique resource surrogate ID
**************************************************************/

CREATE SEQUENCE dbo.ResourceSurrogateIdUniquifierSequence
        AS int
        START WITH 0
        INCREMENT BY 1
        MINVALUE 0
        MAXVALUE 79999
        CYCLE
        CACHE 1000000
GO

/*************************************************************
    Stored procedures for creating and deleting
**************************************************************/

--
-- STORED PROCEDURE
--     UpsertResource_4
--
-- DESCRIPTION
--     Creates or updates (including marking deleted) a FHIR resource
--
-- PARAMETERS
--     @baseResourceSurrogateId
--         * A bigint to which a value between [0, 80000) is added, forming a unique ResourceSurrogateId.
--         * This value should be the current UTC datetime, truncated to millisecond precision, with its 100ns ticks component bitshifted left by 3.
--     @resourceTypeId
--         * The ID of the resource type (See ResourceType table)
--     @resourceId
--         * The resource ID (must be the same as the in the resource itself)
--     @etag
--         * If specified, the version of the resource to update
--     @allowCreate
--         * If false, an error is thrown if the resource does not already exist
--     @isDeleted
--         * Whether this resource marks the resource as deleted
--     @keepHistory
--         * Whether the existing version of the resource should be preserved
--     @requestMethod
--         * The HTTP method/verb used for the request
--     @searchParamHash
--         * A hash of the resource's latest indexed search parameters
--     @rawResource
--         * A compressed UTF16-encoded JSON document
--     @resourceWriteClaims
--         * Claims on the principal that performed the write
--     @compartmentAssignments
--         * Compartments that the resource is part of
--     @referenceSearchParams
--         * Extracted reference search params
--     @tokenSearchParams
--         * Extracted token search params
--     @tokenTextSearchParams
--         * The text representation of extracted token search params
--     @stringSearchParams
--         * Extracted string search params
--     @numberSearchParams
--         * Extracted number search params
--     @quantitySearchParams
--         * Extracted quantity search params
--     @uriSearchParams
--         * Extracted URI search params
--     @dateTimeSearchParms
--         * Extracted datetime search params
--           (NOTE(review): parameter name is misspelled but is part of the caller-facing signature; do not rename.)
--     @referenceTokenCompositeSearchParams
--         * Extracted reference$token search params
--     @tokenTokenCompositeSearchParams
--         * Extracted token$token search params
--     @tokenDateTimeCompositeSearchParams
--         * Extracted token$datetime search params
--     @tokenQuantityCompositeSearchParams
--         * Extracted token$quantity search params
--     @tokenStringCompositeSearchParams
--         * Extracted token$string search params
--     @tokenNumberNumberCompositeSearchParams
--         * Extracted token$number$number search params
--     @isResourceChangeCaptureEnabled
--         * Whether capturing resource change data
--
-- RETURN VALUE
--     The version of the resource as a result set. Will be empty if no insertion was done.
--
CREATE PROCEDURE dbo.UpsertResource_4
    @baseResourceSurrogateId bigint,
    @resourceTypeId smallint,
    @resourceId varchar(64),
    @eTag int = NULL,
    @allowCreate bit,
    @isDeleted bit,
    @keepHistory bit,
    @requestMethod varchar(10),
    @searchParamHash varchar(64),
    @rawResource varbinary(max),
    @resourceWriteClaims dbo.BulkResourceWriteClaimTableType_1 READONLY,
    @compartmentAssignments dbo.BulkCompartmentAssignmentTableType_1 READONLY,
    @referenceSearchParams dbo.BulkReferenceSearchParamTableType_1 READONLY,
    @tokenSearchParams dbo.BulkTokenSearchParamTableType_1 READONLY,
    @tokenTextSearchParams dbo.BulkTokenTextTableType_1 READONLY,
    @stringSearchParams dbo.BulkStringSearchParamTableType_1 READONLY,
    @numberSearchParams dbo.BulkNumberSearchParamTableType_1 READONLY,
    @quantitySearchParams dbo.BulkQuantitySearchParamTableType_1 READONLY,
    @uriSearchParams dbo.BulkUriSearchParamTableType_1 READONLY,
    @dateTimeSearchParms dbo.BulkDateTimeSearchParamTableType_1 READONLY,
    @referenceTokenCompositeSearchParams dbo.BulkReferenceTokenCompositeSearchParamTableType_1 READONLY,
    @tokenTokenCompositeSearchParams dbo.BulkTokenTokenCompositeSearchParamTableType_1 READONLY,
    @tokenDateTimeCompositeSearchParams dbo.BulkTokenDateTimeCompositeSearchParamTableType_1 READONLY,
    @tokenQuantityCompositeSearchParams dbo.BulkTokenQuantityCompositeSearchParamTableType_1 READONLY,
    @tokenStringCompositeSearchParams dbo.BulkTokenStringCompositeSearchParamTableType_1 READONLY,
    @tokenNumberNumberCompositeSearchParams dbo.BulkTokenNumberNumberCompositeSearchParamTableType_1 READONLY,
    @isResourceChangeCaptureEnabled bit = 0
AS
    SET NOCOUNT ON

SET XACT_ABORT ON + BEGIN TRANSACTION + + -- variables for the existing version of the resource that will be replaced + DECLARE @previousResourceSurrogateId bigint + DECLARE @previousVersion bigint + DECLARE @previousIsDeleted bit + + -- This should place a range lock on a row in the IX_Resource_ResourceTypeId_ResourceId nonclustered filtered index + SELECT @previousResourceSurrogateId = ResourceSurrogateId, @previousVersion = Version, @previousIsDeleted = IsDeleted + FROM dbo.Resource WITH (UPDLOCK, HOLDLOCK) + WHERE ResourceTypeId = @resourceTypeId AND ResourceId = @resourceId AND IsHistory = 0 + + IF (@etag IS NOT NULL AND @etag <> @previousVersion) BEGIN + THROW 50412, 'Precondition failed', 1; + END + + DECLARE @version int -- the version of the resource being written + + IF (@previousResourceSurrogateId IS NULL) BEGIN + -- There is no previous version of this resource + + IF (@isDeleted = 1) BEGIN + -- Don't bother marking the resource as deleted since it already does not exist. + COMMIT TRANSACTION + RETURN + END + + IF (@etag IS NOT NULL) BEGIN + -- You can't update a resource with a specified version if the resource does not exist + THROW 50404, 'Resource with specified version not found', 1; + END + + IF (@allowCreate = 0) BEGIN + THROW 50405, 'Resource does not exist and create is not allowed', 1; + END + + SET @version = 1 + END + ELSE BEGIN + -- There is a previous version + + IF (@isDeleted = 1 AND @previousIsDeleted = 1) BEGIN + -- Already deleted - don't create a new version + COMMIT TRANSACTION + RETURN + END + + SET @version = @previousVersion + 1 + + IF (@keepHistory = 1) BEGIN + + -- Set the existing resource as history + UPDATE dbo.Resource + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + -- Set the indexes for this resource as history. + -- Note there is no IsHistory column on ResourceWriteClaim since we do not query it. 
+ + UPDATE dbo.CompartmentAssignment + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.ReferenceSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.TokenSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.TokenText + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.StringSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.UriSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.NumberSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.QuantitySearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.DateTimeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.ReferenceTokenCompositeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.TokenTokenCompositeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.TokenDateTimeCompositeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.TokenQuantityCompositeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + 
+ UPDATE dbo.TokenStringCompositeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + UPDATE dbo.TokenNumberNumberCompositeSearchParam + SET IsHistory = 1 + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + END + ELSE BEGIN + + -- Not keeping history. Delete the current resource and all associated indexes. + + DELETE FROM dbo.Resource + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.ResourceWriteClaim + WHERE ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.CompartmentAssignment + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.ReferenceSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.TokenSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.TokenText + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.StringSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.UriSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.NumberSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.QuantitySearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.DateTimeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.ReferenceTokenCompositeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = 
@previousResourceSurrogateId + + DELETE FROM dbo.TokenTokenCompositeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.TokenDateTimeCompositeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.TokenQuantityCompositeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.TokenStringCompositeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + DELETE FROM dbo.TokenNumberNumberCompositeSearchParam + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @previousResourceSurrogateId + + END + END + + DECLARE @resourceSurrogateId bigint = @baseResourceSurrogateId + (NEXT VALUE FOR ResourceSurrogateIdUniquifierSequence) + DECLARE @isRawResourceMetaSet bit + + IF (@version = 1) BEGIN SET @isRawResourceMetaSet = 1 END ELSE BEGIN SET @isRawResourceMetaSet = 0 END + + INSERT INTO dbo.Resource + (ResourceTypeId, ResourceId, Version, IsHistory, ResourceSurrogateId, IsDeleted, RequestMethod, RawResource, IsRawResourceMetaSet, SearchParamHash) + VALUES + (@resourceTypeId, @resourceId, @version, 0, @resourceSurrogateId, @isDeleted, @requestMethod, @rawResource, @isRawResourceMetaSet, @searchParamHash) + + INSERT INTO dbo.ResourceWriteClaim + (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT @resourceSurrogateId, ClaimTypeId, ClaimValue + FROM @resourceWriteClaims + + INSERT INTO dbo.CompartmentAssignment + (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, CompartmentTypeId, ReferenceResourceId, 0 + FROM @compartmentAssignments + + INSERT INTO dbo.ReferenceSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, 
ReferenceResourceVersion, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion, 0 + FROM @referenceSearchParams + + INSERT INTO dbo.TokenSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId, Code, 0 + FROM @tokenSearchParams + + INSERT INTO dbo.TokenText + (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, Text, 0 + FROM @tokenTextSearchParams + + INSERT INTO dbo.StringSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, Text, TextOverflow, 0 + FROM @stringSearchParams + + INSERT INTO dbo.UriSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, Uri, 0 + FROM @uriSearchParams + + INSERT INTO dbo.NumberSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue, 0 + FROM @numberSearchParams + + INSERT INTO dbo.QuantitySearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue, 0 + FROM @quantitySearchParams + + INSERT INTO dbo.DateTimeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, 0 + 
FROM @dateTimeSearchParms + + INSERT INTO dbo.ReferenceTokenCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, 0 + FROM @referenceTokenCompositeSearchParams + + INSERT INTO dbo.TokenTokenCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SystemId2, Code2, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, SystemId2, Code2, 0 + FROM @tokenTokenCompositeSearchParams + + INSERT INTO dbo.TokenDateTimeCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, StartDateTime2, EndDateTime2, IsLongerThanADay2, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, StartDateTime2, EndDateTime2, IsLongerThanADay2, 0 + FROM @tokenDateTimeCompositeSearchParams + + INSERT INTO dbo.TokenQuantityCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2, 0 + FROM @tokenQuantityCompositeSearchParams + + INSERT INTO dbo.TokenStringCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, Text2, TextOverflow2, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, Text2, TextOverflow2, 0 + FROM @tokenStringCompositeSearchParams + + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, 
SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange, IsHistory) + SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange, 0 + FROM @tokenNumberNumberCompositeSearchParams + + SELECT @version + + IF (@isResourceChangeCaptureEnabled = 1) BEGIN + --If the resource change capture feature is enabled, to execute a stored procedure called CaptureResourceChanges to insert resource change data. + EXEC dbo.CaptureResourceChanges @isDeleted=@isDeleted, @version=@version, @resourceId=@resourceId, @resourceTypeId=@resourceTypeId + END + + COMMIT TRANSACTION +GO + +-- +-- STORED PROCEDURE +-- ReadResource +-- +-- DESCRIPTION +-- Reads a single resource, optionally a specific version of the resource. +-- +-- PARAMETERS +-- @resourceTypeId +-- * The ID of the resource type (See ResourceType table) +-- @resourceId +-- * The resource ID +-- @version +-- * A specific version of the resource. If null, returns the latest version. +-- RETURN VALUE +-- A result set with 0 or 1 rows. +-- +CREATE PROCEDURE dbo.ReadResource + @resourceTypeId smallint, + @resourceId varchar(64), + @version int = NULL +AS + SET NOCOUNT ON + + IF (@version IS NULL) BEGIN + SELECT ResourceSurrogateId, Version, IsDeleted, IsHistory, RawResource, IsRawResourceMetaSet, SearchParamHash + FROM dbo.Resource + WHERE ResourceTypeId = @resourceTypeId AND ResourceId = @resourceId AND IsHistory = 0 + END + ELSE BEGIN + SELECT ResourceSurrogateId, Version, IsDeleted, IsHistory, RawResource, IsRawResourceMetaSet, SearchParamHash + FROM dbo.Resource + WHERE ResourceTypeId = @resourceTypeId AND ResourceId = @resourceId AND Version = @version + END +GO + +-- +-- STORED PROCEDURE +-- Deletes a single resource's history, and optionally the resource itself +-- +-- DESCRIPTION +-- Permanently deletes all history data related to a resource. 
--     Optionally removes all data, including the current resource version.
--     Data remains recoverable from the transaction log, however.
--
-- PARAMETERS
--     @resourceTypeId
--         * The ID of the resource type (See ResourceType table)
--     @resourceId
--         * The resource ID (must be the same as in the resource itself)
--     @keepCurrentVersion
--         * When 1, the current resource version is kept; otherwise all data is removed.
--
CREATE PROCEDURE dbo.HardDeleteResource_2
    @resourceTypeId smallint,
    @resourceId varchar(64),
    @keepCurrentVersion smallint
AS
    SET NOCOUNT ON

    SET XACT_ABORT ON
    BEGIN TRANSACTION

    DECLARE @resourceSurrogateIds TABLE(ResourceSurrogateId bigint NOT NULL)

    -- Capture the surrogate IDs of every Resource row removed so the
    -- per-table cleanup below targets exactly those versions.
    DELETE FROM dbo.Resource
    OUTPUT deleted.ResourceSurrogateId
    INTO @resourceSurrogateIds
    WHERE ResourceTypeId = @resourceTypeId AND ResourceId = @resourceId
        AND NOT(@keepCurrentVersion=1 and IsHistory=0)

    DELETE FROM dbo.ResourceWriteClaim
    WHERE ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.CompartmentAssignment
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.ReferenceSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenText
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.StringSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.UriSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.NumberSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.QuantitySearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.DateTimeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.ReferenceTokenCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenTokenCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenDateTimeCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenQuantityCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenStringCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    DELETE FROM dbo.TokenNumberNumberCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId IN (SELECT ResourceSurrogateId FROM @resourceSurrogateIds)

    COMMIT TRANSACTION
GO

/*************************************************************
    Export Job
**************************************************************/
CREATE TABLE dbo.ExportJob
(
    Id varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL,
    Hash varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL,
    Status varchar(10) NOT NULL,
    HeartbeatDateTime datetime2(7) NULL,
    RawJobRecord varchar(max) NOT NULL,
    JobVersion rowversion NOT NULL
)

CREATE UNIQUE CLUSTERED INDEX IXC_ExportJob ON dbo.ExportJob
(
    Id
)

CREATE UNIQUE NONCLUSTERED INDEX IX_ExportJob_Hash_Status_HeartbeatDateTime ON dbo.ExportJob
(
    Hash,
    Status,
    HeartbeatDateTime
)

GO

/*************************************************************
    Stored procedures for exporting
**************************************************************/
--
-- STORED PROCEDURE
--     Creates an export job.
--
-- DESCRIPTION
--     Creates a new row to the ExportJob table, adding a new job to the queue of jobs to be processed.
--
-- PARAMETERS
--     @id
--         * The ID of the export job record
--     @hash
--         * The SHA256 hash of the export job record ID
--     @status
--         * The status of the export job
--     @rawJobRecord
--         * A JSON document
--
-- RETURN VALUE
--     The row version of the created export job.
--
CREATE PROCEDURE dbo.CreateExportJob
    @id varchar(64),
    @hash varchar(64),
    @status varchar(10),
    @rawJobRecord varchar(max)
AS
    SET NOCOUNT ON

    SET XACT_ABORT ON
    BEGIN TRANSACTION

    DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()

    INSERT INTO dbo.ExportJob
        (Id, Hash, Status, HeartbeatDateTime, RawJobRecord)
    VALUES
        (@id, @hash, @status, @heartbeatDateTime, @rawJobRecord)

    -- NOTE(review): JobVersion is rowversion (binary(8)); the INT cast truncates
    -- the value — confirm callers expect an int here.
    SELECT CAST(MIN_ACTIVE_ROWVERSION() AS INT)

    COMMIT TRANSACTION
GO

--
-- STORED PROCEDURE
--     Gets an export job given its ID.
--
-- DESCRIPTION
--     Retrieves the export job record from the ExportJob table that has the matching ID.
--
-- PARAMETERS
--     @id
--         * The ID of the export job record to retrieve
--
-- RETURN VALUE
--     The matching export job.
--
CREATE PROCEDURE dbo.GetExportJobById
    @id varchar(64)
AS
    SET NOCOUNT ON

    SELECT RawJobRecord, JobVersion
    FROM dbo.ExportJob
    WHERE Id = @id
GO

--
-- STORED PROCEDURE
--     Gets an export job given the hash of its ID.
--
-- DESCRIPTION
--     Retrieves the export job record from the ExportJob table that has the matching hash.
--
-- PARAMETERS
--     @hash
--         * The SHA256 hash of the export job record ID
--
-- RETURN VALUE
--     The matching export job.
--
CREATE PROCEDURE dbo.GetExportJobByHash
    @hash varchar(64)
AS
    SET NOCOUNT ON

    SELECT TOP(1) RawJobRecord, JobVersion
    FROM dbo.ExportJob
    WHERE Hash = @hash AND (Status = 'Queued' OR Status = 'Running')
    ORDER BY HeartbeatDateTime ASC
GO

--
-- STORED PROCEDURE
--     Updates an export job.
--
-- DESCRIPTION
--     Modifies an existing job in the ExportJob table.
--
-- PARAMETERS
--     @id
--         * The ID of the export job record
--     @status
--         * The status of the export job
--     @rawJobRecord
--         * A JSON document
--     @jobVersion
--         * The version of the job to update must match this
--
-- RETURN VALUE
--     The row version of the updated export job.
--
CREATE PROCEDURE dbo.UpdateExportJob
    @id varchar(64),
    @status varchar(10),
    @rawJobRecord varchar(max),
    @jobVersion binary(8)
AS
    SET NOCOUNT ON

    SET XACT_ABORT ON
    BEGIN TRANSACTION

    DECLARE @currentJobVersion binary(8)

    -- Acquire and hold an update lock on a row in the ExportJob table for the entire transaction.
    -- This ensures the version check and update occur atomically.
    SELECT @currentJobVersion = JobVersion
    FROM dbo.ExportJob WITH (UPDLOCK, HOLDLOCK)
    WHERE Id = @id

    IF (@currentJobVersion IS NULL) BEGIN
        THROW 50404, 'Export job record not found', 1;
    END

    IF (@jobVersion <> @currentJobVersion) BEGIN
        THROW 50412, 'Precondition failed', 1;
    END

    -- We will timestamp the jobs when we update them to track stale jobs.
    DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()

    UPDATE dbo.ExportJob
    SET Status = @status, HeartbeatDateTime = @heartbeatDateTime, RawJobRecord = @rawJobRecord
    WHERE Id = @id

    SELECT @@DBTS

    COMMIT TRANSACTION
GO

--
-- STORED PROCEDURE
--     Acquires export jobs.
--
-- DESCRIPTION
--     Timestamps the available export jobs and sets their statuses to running.
--
-- PARAMETERS
--     @jobHeartbeatTimeoutThresholdInSeconds
--         * The number of seconds that must pass before an export job is considered stale
--     @maximumNumberOfConcurrentJobsAllowed
--         * The maximum number of running jobs we can have at once
--
-- RETURN VALUE
--     The updated jobs that are now running.
--
CREATE PROCEDURE dbo.AcquireExportJobs
    @jobHeartbeatTimeoutThresholdInSeconds bigint,
    @maximumNumberOfConcurrentJobsAllowed int
AS
    SET NOCOUNT ON
    SET XACT_ABORT ON

    SET TRANSACTION ISOLATION LEVEL SERIALIZABLE
    BEGIN TRANSACTION

    -- We will consider a job to be stale if its timestamp is smaller than or equal to this.
    DECLARE @expirationDateTime dateTime2(7)
    SELECT @expirationDateTime = DATEADD(second, -@jobHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME())

    -- Get the number of jobs that are running and not stale.
    -- Acquire and hold an exclusive table lock for the entire transaction to prevent jobs from being created, updated or deleted during acquisitions.
    DECLARE @numberOfRunningJobs int
    SELECT @numberOfRunningJobs = COUNT(*) FROM dbo.ExportJob WITH (TABLOCKX) WHERE Status = 'Running' AND HeartbeatDateTime > @expirationDateTime

    -- Determine how many available jobs we can pick up.
    DECLARE @limit int = @maximumNumberOfConcurrentJobsAllowed - @numberOfRunningJobs;

    -- FIX: @limit goes negative when the number of running jobs exceeds the configured
    -- maximum (e.g. after the maximum was lowered), and TOP(@limit) raises an error for
    -- negative row counts. Clamp to zero so the procedure still returns an (empty)
    -- result set; dbo.AcquireReindexJobs guards the same condition.
    IF (@limit < 0) SET @limit = 0;

    DECLARE @availableJobs TABLE (Id varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, JobVersion binary(8) NOT NULL)

    -- Get the available jobs, which are export jobs that are queued or stale.
    -- Older jobs will be prioritized over newer ones.
    INSERT INTO @availableJobs
    SELECT TOP(@limit) Id, JobVersion
    FROM dbo.ExportJob
    WHERE (Status = 'Queued' OR (Status = 'Running' AND HeartbeatDateTime <= @expirationDateTime))
    ORDER BY HeartbeatDateTime

    DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()

    -- Update each available job's status to running both in the export table's status column and in the raw export job record JSON.
    UPDATE dbo.ExportJob
    SET Status = 'Running', HeartbeatDateTime = @heartbeatDateTime, RawJobRecord = JSON_MODIFY(RawJobRecord,'$.status', 'Running')
    OUTPUT inserted.RawJobRecord, inserted.JobVersion
    FROM dbo.ExportJob job INNER JOIN @availableJobs availableJob ON job.Id = availableJob.Id AND job.JobVersion = availableJob.JobVersion

    COMMIT TRANSACTION
GO

/*************************************************************
    Search Parameter Status Information
**************************************************************/

-- We adopted this naming convention for table-valued parameters because they are immutable.
CREATE TYPE dbo.SearchParamTableType_1 AS TABLE
(
    Uri varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL,
    Status varchar(10) NOT NULL,
    IsPartiallySupported bit NOT NULL
)

GO

/*************************************************************
    Stored procedures for search parameter information
**************************************************************/
--
-- STORED PROCEDURE
--     GetSearchParamStatuses
--
-- DESCRIPTION
--     Gets all the search parameters and their statuses.
--
-- RETURN VALUE
--     The search parameters and their statuses.
--
CREATE PROCEDURE dbo.GetSearchParamStatuses
AS
    SET NOCOUNT ON

    SELECT SearchParamId, Uri, Status, LastUpdated, IsPartiallySupported FROM dbo.SearchParam
GO

--
-- STORED PROCEDURE
--     UpsertSearchParams
--
-- DESCRIPTION
--     Given a set of search parameters, creates or updates the parameters.
--
-- PARAMETERS
--     @searchParams
--         * The updated existing search parameters or the new search parameters
--
-- RETURN VALUE
--     The IDs and URIs of the search parameters that were inserted (not updated).
--
CREATE PROCEDURE dbo.UpsertSearchParams
    @searchParams dbo.SearchParamTableType_1 READONLY
AS
    SET NOCOUNT ON
    SET XACT_ABORT ON

    SET TRANSACTION ISOLATION LEVEL SERIALIZABLE
    BEGIN TRANSACTION

    DECLARE @lastUpdated datetimeoffset(7) = SYSDATETIMEOFFSET()

    DECLARE @summaryOfChanges TABLE(Uri varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, Action varchar(20) NOT NULL)

    -- Hold an exclusive table lock for the whole transaction so no parameter rows
    -- can be added or modified while the upsert is in progress.
    MERGE INTO dbo.SearchParam WITH (TABLOCKX) AS target
    USING @searchParams AS source
    ON target.Uri = source.Uri
    WHEN MATCHED THEN
        UPDATE
        SET Status = source.Status, LastUpdated = @lastUpdated, IsPartiallySupported = source.IsPartiallySupported
    WHEN NOT MATCHED BY target THEN
        INSERT
            (Uri, Status, LastUpdated, IsPartiallySupported)
            VALUES (source.Uri, source.Status, @lastUpdated, source.IsPartiallySupported)
    OUTPUT source.Uri, $action INTO @summaryOfChanges;

    -- Report back only the parameters that were newly inserted (not updated).
    SELECT sp.SearchParamId, sp.Uri
    FROM dbo.SearchParam sp
    INNER JOIN @summaryOfChanges changes
    ON sp.Uri = changes.Uri
    WHERE changes.Action = 'INSERT'

    COMMIT TRANSACTION
GO

/*************************************************************
    Reindex Job
**************************************************************/
CREATE TABLE dbo.ReindexJob
(
    Id varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL,
    Status varchar(10) NOT NULL,
    HeartbeatDateTime datetime2(7) NULL,
    RawJobRecord varchar(max) NOT NULL,
    JobVersion rowversion NOT NULL
)

CREATE UNIQUE CLUSTERED INDEX IXC_ReindexJob ON dbo.ReindexJob
(
    Id
)

GO

/*************************************************************
    Stored procedures for reindexing
**************************************************************/
--
-- STORED PROCEDURE
--     CreateReindexJob
--
-- DESCRIPTION
--     Inserts a new row into the ReindexJob table, queueing a new job for processing.
--
-- PARAMETERS
--     @id
--         * The ID of the reindex job record
--     @status
--         * The status of the reindex job
--     @rawJobRecord
--         * A JSON document
--
-- RETURN VALUE
--     The row version of the created reindex job.
--
CREATE PROCEDURE dbo.CreateReindexJob
    @id varchar(64),
    @status varchar(10),
    @rawJobRecord varchar(max)
AS
    SET NOCOUNT ON

    SET XACT_ABORT ON
    BEGIN TRANSACTION

    -- Stamp the job at creation so staleness tracking starts immediately.
    DECLARE @now datetime2(7) = SYSUTCDATETIME()

    INSERT INTO dbo.ReindexJob
        (Id, Status, HeartbeatDateTime, RawJobRecord)
    VALUES
        (@id, @status, @now, @rawJobRecord)

    -- NOTE(review): MIN_ACTIVE_ROWVERSION() is an 8-byte value; casting to int
    -- truncates the high bytes — confirm callers only treat this as an opaque token.
    SELECT CAST(MIN_ACTIVE_ROWVERSION() AS INT)

    COMMIT TRANSACTION
GO

--
-- STORED PROCEDURE
--     GetReindexJobById
--
-- DESCRIPTION
--     Retrieves the reindex job record from the ReindexJob table that has the matching ID.
--
-- PARAMETERS
--     @id
--         * The ID of the reindex job record to retrieve
--
-- RETURN VALUE
--     The matching reindex job.
--
CREATE PROCEDURE dbo.GetReindexJobById
    @id varchar(64)
AS
    SET NOCOUNT ON

    SELECT RawJobRecord, JobVersion
    FROM dbo.ReindexJob
    WHERE Id = @id
GO

--
-- STORED PROCEDURE
--     UpdateReindexJob
--
-- DESCRIPTION
--     Modifies an existing job in the ReindexJob table.
--
-- PARAMETERS
--     @id
--         * The ID of the reindex job record
--     @status
--         * The status of the reindex job
--     @rawJobRecord
--         * A JSON document
--     @jobVersion
--         * The version of the job to update must match this
--
-- RETURN VALUE
--     The row version of the updated reindex job.
--
CREATE PROCEDURE dbo.UpdateReindexJob
    @id varchar(64),
    @status varchar(10),
    @rawJobRecord varchar(max),
    @jobVersion binary(8)
AS
    SET NOCOUNT ON

    SET XACT_ABORT ON
    BEGIN TRANSACTION

    DECLARE @existingJobVersion binary(8)

    -- Acquire and hold an update lock on the row for the whole transaction so the
    -- version check and the update below are atomic.
    SELECT @existingJobVersion = JobVersion
    FROM dbo.ReindexJob WITH (UPDLOCK, HOLDLOCK)
    WHERE Id = @id

    IF @existingJobVersion IS NULL BEGIN
        THROW 50404, 'Reindex job record not found', 1;
    END

    IF @jobVersion <> @existingJobVersion BEGIN
        THROW 50412, 'Precondition failed', 1;
    END

    -- Timestamp on every update so stale jobs can be detected.
    DECLARE @now datetime2(7) = SYSUTCDATETIME()

    UPDATE dbo.ReindexJob
    SET Status = @status, HeartbeatDateTime = @now, RawJobRecord = @rawJobRecord
    WHERE Id = @id

    -- NOTE(review): @@DBTS is the database-wide last-used rowversion; it is assumed
    -- to equal this row's new JobVersion because the row is locked — verify under
    -- concurrent writes to other rowversioned tables.
    SELECT @@DBTS

    COMMIT TRANSACTION
GO

--
-- STORED PROCEDURE
--     AcquireReindexJobs
--
-- DESCRIPTION
--     Timestamps the available reindex jobs and sets their statuses to running.
--
-- PARAMETERS
--     @jobHeartbeatTimeoutThresholdInSeconds
--         * The number of seconds that must pass before a reindex job is considered stale
--     @maximumNumberOfConcurrentJobsAllowed
--         * The maximum number of running jobs we can have at once
--
-- RETURN VALUE
--     The updated jobs that are now running.
--
CREATE PROCEDURE dbo.AcquireReindexJobs
    @jobHeartbeatTimeoutThresholdInSeconds bigint,
    @maximumNumberOfConcurrentJobsAllowed int
AS
    SET NOCOUNT ON
    SET XACT_ABORT ON

    SET TRANSACTION ISOLATION LEVEL SERIALIZABLE
    BEGIN TRANSACTION

    -- A job whose heartbeat is at or before this cutoff is considered stale.
    DECLARE @expirationDateTime dateTime2(7)
    SELECT @expirationDateTime = DATEADD(second, -@jobHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME())

    -- Count the fresh running jobs, taking an exclusive table lock for the whole
    -- transaction so no job can be created, updated or deleted mid-acquisition.
    DECLARE @numberOfRunningJobs int
    SELECT @numberOfRunningJobs = COUNT(*) FROM dbo.ReindexJob WITH (TABLOCKX) WHERE Status = 'Running' AND HeartbeatDateTime > @expirationDateTime

    -- Determine how many available jobs we can pick up.
    DECLARE @limit int = @maximumNumberOfConcurrentJobsAllowed - @numberOfRunningJobs;

    IF (@limit > 0) BEGIN

        DECLARE @availableJobs TABLE (Id varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, JobVersion binary(8) NOT NULL)

        -- Available jobs are those queued or stale; oldest heartbeat first.
        INSERT INTO @availableJobs
        SELECT TOP(@limit) Id, JobVersion
        FROM dbo.ReindexJob
        WHERE (Status = 'Queued' OR (Status = 'Running' AND HeartbeatDateTime <= @expirationDateTime))
        ORDER BY HeartbeatDateTime

        DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()

        -- Mark each acquired job running in both the Status column and the raw JSON record.
        UPDATE dbo.ReindexJob
        SET Status = 'Running', HeartbeatDateTime = @heartbeatDateTime, RawJobRecord = JSON_MODIFY(RawJobRecord,'$.status', 'Running')
        OUTPUT inserted.RawJobRecord, inserted.JobVersion
        FROM dbo.ReindexJob job INNER JOIN @availableJobs availableJob ON job.Id = availableJob.Id AND job.JobVersion = availableJob.JobVersion

    END

    COMMIT TRANSACTION
GO

--
-- STORED PROCEDURE
--     CheckActiveReindexJobs
--
-- DESCRIPTION
--     Queries the datastore for any reindex job documents with a status of running, queued or paused.
--
-- RETURN VALUE
--     The job IDs of any active reindex jobs.
--
CREATE PROCEDURE dbo.CheckActiveReindexJobs
AS
    SET NOCOUNT ON

    SELECT Id
    FROM dbo.ReindexJob
    WHERE Status IN ('Running', 'Queued', 'Paused')
GO

--
-- STORED PROCEDURE
--     ReindexResource
--
-- DESCRIPTION
--     Updates the search indices of a given resource
--
-- PARAMETERS
--     @resourceTypeId
--         * The ID of the resource type (See ResourceType table)
--     @resourceId
--         * The resource ID (must be the same as in the resource itself)
--     @eTag
--         * If specified, the version of the resource to update
--     @searchParamHash
--         * A hash of the resource's latest indexed search parameters
--     @resourceWriteClaims
--         * Claims on the principal that performed the write
--     @compartmentAssignments
--         * Compartments that the resource is part of
--     @referenceSearchParams
--         * Extracted reference search params
--     @tokenSearchParams
--         * Extracted token search params
--     @tokenTextSearchParams
--         * The text representation of extracted token search params
--     @stringSearchParams
--         * Extracted string search params
--     @numberSearchParams
--         * Extracted number search params
--     @quantitySearchParams
--         * Extracted quantity search params
--     @uriSearchParams
--         * Extracted URI search params
--     @dateTimeSearchParms
--         * Extracted datetime search params
--     @referenceTokenCompositeSearchParams
--         * Extracted reference$token search params
--     @tokenTokenCompositeSearchParams
--         * Extracted token$token search params
--     @tokenDateTimeCompositeSearchParams
--         * Extracted token$datetime search params
--     @tokenQuantityCompositeSearchParams
--         * Extracted token$quantity search params
--     @tokenStringCompositeSearchParams
--         * Extracted token$string search params
--     @tokenNumberNumberCompositeSearchParams
--         * Extracted token$number$number search params
--
CREATE PROCEDURE dbo.ReindexResource
    @resourceTypeId smallint,
    @resourceId varchar(64),
    @eTag int = NULL,
    @searchParamHash varchar(64),
    @resourceWriteClaims dbo.BulkResourceWriteClaimTableType_1 READONLY,
    @compartmentAssignments dbo.BulkCompartmentAssignmentTableType_1 READONLY,
    @referenceSearchParams dbo.BulkReferenceSearchParamTableType_1 READONLY,
    @tokenSearchParams dbo.BulkTokenSearchParamTableType_1 READONLY,
    @tokenTextSearchParams dbo.BulkTokenTextTableType_1 READONLY,
    @stringSearchParams dbo.BulkStringSearchParamTableType_1 READONLY,
    @numberSearchParams dbo.BulkNumberSearchParamTableType_1 READONLY,
    @quantitySearchParams dbo.BulkQuantitySearchParamTableType_1 READONLY,
    @uriSearchParams dbo.BulkUriSearchParamTableType_1 READONLY,
    @dateTimeSearchParms dbo.BulkDateTimeSearchParamTableType_1 READONLY,
    @referenceTokenCompositeSearchParams dbo.BulkReferenceTokenCompositeSearchParamTableType_1 READONLY,
    @tokenTokenCompositeSearchParams dbo.BulkTokenTokenCompositeSearchParamTableType_1 READONLY,
    @tokenDateTimeCompositeSearchParams dbo.BulkTokenDateTimeCompositeSearchParamTableType_1 READONLY,
    @tokenQuantityCompositeSearchParams dbo.BulkTokenQuantityCompositeSearchParamTableType_1 READONLY,
    @tokenStringCompositeSearchParams dbo.BulkTokenStringCompositeSearchParamTableType_1 READONLY,
    @tokenNumberNumberCompositeSearchParams dbo.BulkTokenNumberNumberCompositeSearchParamTableType_1 READONLY
AS
    SET NOCOUNT ON

    SET XACT_ABORT ON
    BEGIN TRANSACTION

    DECLARE @resourceSurrogateId bigint
    DECLARE @version bigint

    -- This should place a range lock on a row in the IX_Resource_ResourceTypeId_ResourceId nonclustered filtered index.
    SELECT @resourceSurrogateId = ResourceSurrogateId, @version = Version
    FROM dbo.Resource WITH (UPDLOCK, HOLDLOCK)
    WHERE ResourceTypeId = @resourceTypeId AND ResourceId = @resourceId AND IsHistory = 0

    -- NOTE(review): if no current resource matches, @version stays NULL, the check
    -- below passes, and the statements that follow affect zero rows — the procedure
    -- completes as a silent no-op. Confirm that this is the intended behavior.
    IF (@eTag IS NOT NULL AND @eTag <> @version) BEGIN
        THROW 50412, 'Precondition failed', 1;
    END

    UPDATE dbo.Resource
    SET SearchParamHash = @searchParamHash
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    -- First, delete all the resource's indices.
    DELETE FROM dbo.ResourceWriteClaim
    WHERE ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.CompartmentAssignment
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.ReferenceSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenText
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.StringSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.UriSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.NumberSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.QuantitySearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.DateTimeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.ReferenceTokenCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenTokenCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenDateTimeCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenQuantityCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenStringCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    DELETE FROM dbo.TokenNumberNumberCompositeSearchParam
    WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId = @resourceSurrogateId

    -- Next, insert all the new indices.
    INSERT INTO dbo.ResourceWriteClaim
        (ResourceSurrogateId, ClaimTypeId, ClaimValue)
    SELECT @resourceSurrogateId, ClaimTypeId, ClaimValue
    FROM @resourceWriteClaims

    INSERT INTO dbo.CompartmentAssignment
        (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, CompartmentTypeId, ReferenceResourceId, 0
    FROM @compartmentAssignments

    INSERT INTO dbo.ReferenceSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion, 0
    FROM @referenceSearchParams

    INSERT INTO dbo.TokenSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId, Code, 0
    FROM @tokenSearchParams

    INSERT INTO dbo.TokenText
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, Text, 0
    FROM @tokenTextSearchParams

    INSERT INTO dbo.StringSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, Text, TextOverflow, 0
    FROM @stringSearchParams

    INSERT INTO dbo.UriSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, Uri, 0
    FROM @uriSearchParams

    INSERT INTO dbo.NumberSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue, 0
    FROM @numberSearchParams

    INSERT INTO dbo.QuantitySearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue, 0
    FROM @quantitySearchParams

    INSERT INTO dbo.DateTimeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, 0
    FROM @dateTimeSearchParms

    INSERT INTO dbo.ReferenceTokenCompositeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, 0
    FROM @referenceTokenCompositeSearchParams

    INSERT INTO dbo.TokenTokenCompositeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SystemId2, Code2, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, SystemId2, Code2, 0
    FROM @tokenTokenCompositeSearchParams

    INSERT INTO dbo.TokenDateTimeCompositeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, StartDateTime2, EndDateTime2, IsLongerThanADay2, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, StartDateTime2, EndDateTime2, IsLongerThanADay2, 0
    FROM @tokenDateTimeCompositeSearchParams

    INSERT INTO dbo.TokenQuantityCompositeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2, 0
    FROM @tokenQuantityCompositeSearchParams

    INSERT INTO dbo.TokenStringCompositeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, Text2, TextOverflow2, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, Text2, TextOverflow2, 0
    FROM @tokenStringCompositeSearchParams

    INSERT INTO dbo.TokenNumberNumberCompositeSearchParam
        (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange, IsHistory)
    SELECT DISTINCT @resourceTypeId, @resourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange, 0
    FROM @tokenNumberNumberCompositeSearchParams

    COMMIT TRANSACTION
GO

CREATE TYPE dbo.BulkReindexResourceTableType_1 AS TABLE
(
    Offset int NOT NULL,
    ResourceTypeId smallint NOT NULL,
    ResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL,
    ETag int NULL,
    SearchParamHash varchar(64) NOT NULL
)

GO

--
-- STORED PROCEDURE
--     BulkReindexResources
--
-- DESCRIPTION
--     Updates the search indices of a batch of resources
--
-- PARAMETERS
--     @resourcesToReindex
--         * The type IDs, IDs, eTags and hashes of the resources to reindex
--     @resourceWriteClaims
--         * Claims on the principal that performed the write
--     @compartmentAssignments
--         * Compartments that the resource is part of
--     @referenceSearchParams
--         * Extracted reference search params
--     @tokenSearchParams
--         * Extracted token search params
--     @tokenTextSearchParams
--         * The text
representation of extracted token search params +-- @stringSearchParams +-- * Extracted string search params +-- @numberSearchParams +-- * Extracted number search params +-- @quantitySearchParams +-- * Extracted quantity search params +-- @uriSearchParams +-- * Extracted URI search params +-- @dateTimeSearchParms +-- * Extracted datetime search params +-- @referenceTokenCompositeSearchParams +-- * Extracted reference$token search params +-- @tokenTokenCompositeSearchParams +-- * Extracted token$token tokensearch params +-- @tokenDateTimeCompositeSearchParams +-- * Extracted token$datetime search params +-- @tokenQuantityCompositeSearchParams +-- * Extracted token$quantity search params +-- @tokenStringCompositeSearchParams +-- * Extracted token$string search params +-- @tokenNumberNumberCompositeSearchParams +-- * Extracted token$number$number search params +-- +-- RETURN VALUE +-- The number of resources that failed to reindex due to versioning conflicts. +-- +CREATE PROCEDURE dbo.BulkReindexResources + @resourcesToReindex dbo.BulkReindexResourceTableType_1 READONLY, + @resourceWriteClaims dbo.BulkResourceWriteClaimTableType_1 READONLY, + @compartmentAssignments dbo.BulkCompartmentAssignmentTableType_1 READONLY, + @referenceSearchParams dbo.BulkReferenceSearchParamTableType_1 READONLY, + @tokenSearchParams dbo.BulkTokenSearchParamTableType_1 READONLY, + @tokenTextSearchParams dbo.BulkTokenTextTableType_1 READONLY, + @stringSearchParams dbo.BulkStringSearchParamTableType_1 READONLY, + @numberSearchParams dbo.BulkNumberSearchParamTableType_1 READONLY, + @quantitySearchParams dbo.BulkQuantitySearchParamTableType_1 READONLY, + @uriSearchParams dbo.BulkUriSearchParamTableType_1 READONLY, + @dateTimeSearchParms dbo.BulkDateTimeSearchParamTableType_1 READONLY, + @referenceTokenCompositeSearchParams dbo.BulkReferenceTokenCompositeSearchParamTableType_1 READONLY, + @tokenTokenCompositeSearchParams dbo.BulkTokenTokenCompositeSearchParamTableType_1 READONLY, + 
@tokenDateTimeCompositeSearchParams dbo.BulkTokenDateTimeCompositeSearchParamTableType_1 READONLY, + @tokenQuantityCompositeSearchParams dbo.BulkTokenQuantityCompositeSearchParamTableType_1 READONLY, + @tokenStringCompositeSearchParams dbo.BulkTokenStringCompositeSearchParamTableType_1 READONLY, + @tokenNumberNumberCompositeSearchParams dbo.BulkTokenNumberNumberCompositeSearchParamTableType_1 READONLY +AS + SET NOCOUNT ON + + SET XACT_ABORT ON + BEGIN TRANSACTION + + DECLARE @computedValues TABLE + ( + Offset int NOT NULL, + ResourceTypeId smallint NOT NULL, + VersionProvided bigint NULL, + SearchParamHash varchar(64) NOT NULL, + ResourceSurrogateId bigint NULL, + VersionInDatabase bigint NULL + ) + + INSERT INTO @computedValues + SELECT + resourceToReindex.Offset, + resourceToReindex.ResourceTypeId, + resourceToReindex.ETag, + resourceToReindex.SearchParamHash, + resourceInDB.ResourceSurrogateId, + resourceInDB.Version + FROM @resourcesToReindex resourceToReindex + LEFT OUTER JOIN dbo.Resource resourceInDB WITH (UPDLOCK, INDEX(IX_Resource_ResourceTypeId_ResourceId)) + ON resourceInDB.ResourceTypeId = resourceToReindex.ResourceTypeId + AND resourceInDB.ResourceId = resourceToReindex.ResourceId + AND resourceInDB.IsHistory = 0 + + DECLARE @versionDiff int + SET @versionDiff = (SELECT COUNT(*) FROM @computedValues WHERE VersionProvided IS NOT NULL AND VersionProvided <> VersionInDatabase) + + IF (@versionDiff > 0) BEGIN + -- Don't reindex resources that have outdated versions + DELETE FROM @computedValues + WHERE VersionProvided IS NOT NULL AND VersionProvided <> VersionInDatabase + END + + -- Update the search parameter hash value in the main resource table + UPDATE resourceInDB + SET resourceInDB.SearchParamHash = resourceToReindex.SearchParamHash + FROM @computedValues resourceToReindex + INNER JOIN dbo.Resource resourceInDB + ON resourceInDB.ResourceTypeId = resourceToReindex.ResourceTypeId AND resourceInDB.ResourceSurrogateId = 
resourceToReindex.ResourceSurrogateId + + -- First, delete all the indices of the resources to reindex. + DELETE searchIndex FROM dbo.ResourceWriteClaim searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.CompartmentAssignment searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.ReferenceSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenText searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.StringSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.UriSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.NumberSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND 
searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.QuantitySearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.DateTimeSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.ReferenceTokenCompositeSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenTokenCompositeSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenDateTimeCompositeSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenQuantityCompositeSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenStringCompositeSearchParam searchIndex + INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + DELETE searchIndex FROM dbo.TokenNumberNumberCompositeSearchParam searchIndex + 
INNER JOIN @computedValues resourceToReindex + ON searchIndex.ResourceTypeId = resourceToReindex.ResourceTypeId AND searchIndex.ResourceSurrogateId = resourceToReindex.ResourceSurrogateId + + -- Next, insert all the new indices. + INSERT INTO dbo.ResourceWriteClaim + (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT DISTINCT resourceToReindex.ResourceSurrogateId, searchIndex.ClaimTypeId, searchIndex.ClaimValue + FROM @resourceWriteClaims searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.CompartmentAssignment + (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.CompartmentTypeId, searchIndex.ReferenceResourceId, 0 + FROM @compartmentAssignments searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.ReferenceSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.BaseUri, searchIndex.ReferenceResourceTypeId, searchIndex.ReferenceResourceId, searchIndex.ReferenceResourceVersion, 0 + FROM @referenceSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.TokenSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId, searchIndex.Code, 0 + FROM @tokenSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO 
dbo.TokenText + (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.Text, 0 + FROM @tokenTextSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.StringSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.Text, searchIndex.TextOverflow, 0 + FROM @stringSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.UriSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.Uri, 0 + FROM @uriSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.NumberSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SingleValue, searchIndex.LowValue, searchIndex.HighValue, 0 + FROM @numberSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.QuantitySearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId, searchIndex.QuantityCodeId, searchIndex.SingleValue, 
searchIndex.LowValue, searchIndex.HighValue, 0 + FROM @quantitySearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.DateTimeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.StartDateTime, searchIndex.EndDateTime, searchIndex.IsLongerThanADay, 0 + FROM @dateTimeSearchParms searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.ReferenceTokenCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.BaseUri1, searchIndex.ReferenceResourceTypeId1, searchIndex.ReferenceResourceId1, searchIndex.ReferenceResourceVersion1, searchIndex.SystemId2, searchIndex.Code2, 0 + FROM @referenceTokenCompositeSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.TokenTokenCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SystemId2, Code2, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId1, searchIndex.Code1, searchIndex.SystemId2, searchIndex.Code2, 0 + FROM @tokenTokenCompositeSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.TokenDateTimeCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, StartDateTime2, 
EndDateTime2, IsLongerThanADay2, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId1, searchIndex.Code1, searchIndex.StartDateTime2, searchIndex.EndDateTime2, searchIndex.IsLongerThanADay2, 0 + FROM @tokenDateTimeCompositeSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.TokenQuantityCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId1, searchIndex.Code1, searchIndex.SingleValue2, searchIndex.SystemId2, searchIndex.QuantityCodeId2, searchIndex.LowValue2, searchIndex.HighValue2, 0 + FROM @tokenQuantityCompositeSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.TokenStringCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, Text2, TextOverflow2, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId1, searchIndex.Code1, searchIndex.Text2, searchIndex.TextOverflow2, 0 + FROM @tokenStringCompositeSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam + (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange, IsHistory) + SELECT DISTINCT resourceToReindex.ResourceTypeId, resourceToReindex.ResourceSurrogateId, searchIndex.SearchParamId, searchIndex.SystemId1, searchIndex.Code1, 
searchIndex.SingleValue2, searchIndex.LowValue2, searchIndex.HighValue2, searchIndex.SingleValue3, searchIndex.LowValue3, searchIndex.HighValue3, searchIndex.HasRange, 0 + FROM @tokenNumberNumberCompositeSearchParams searchIndex + INNER JOIN @computedValues resourceToReindex ON searchIndex.Offset = resourceToReindex.Offset + + SELECT @versionDiff + + COMMIT TRANSACTION +GO + +/************************************************************* + Task Table +**************************************************************/ +CREATE TABLE [dbo].[TaskInfo]( + [TaskId] [varchar](64) NOT NULL, + [QueueId] [varchar](64) NOT NULL, + [Status] [smallint] NOT NULL, + [TaskTypeId] [smallint] NOT NULL, + [RunId] [varchar](50) null, + [IsCanceled] [bit] NOT NULL, + [RetryCount] [smallint] NOT NULL, + [MaxRetryCount] [smallint] NOT NULL, + [HeartbeatDateTime] [datetime2](7) NULL, + [InputData] [varchar](max) NOT NULL, + [TaskContext] [varchar](max) NULL, + [Result] [varchar](max) NULL +) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY] + +CREATE UNIQUE CLUSTERED INDEX IXC_Task on [dbo].[TaskInfo] +( + TaskId +) +GO + +/************************************************************* + Stored procedures for general task +**************************************************************/ +-- +-- STORED PROCEDURE +-- CreateTask_2 +-- +-- DESCRIPTION +-- Create task for given task payload. 
+--
+-- PARAMETERS
+--     @taskId
+--         * The ID of the task record to create
+--     @queueId
+--         * The ID of the queue the task belongs to
+--     @taskTypeId
+--         * The type ID of the task
+--     @maxRetryCount
+--         * The maximum number for retry operation
+--     @inputData
+--         * Input data payload for the task
+--     @isUniqueTaskByType
+--         * Only create task if there's no other active task with same task type id
+--
+CREATE PROCEDURE [dbo].[CreateTask_2]
+    @taskId varchar(64),
+    @queueId varchar(64),
+    @taskTypeId smallint,
+    @maxRetryCount smallint = 3,
+    @inputData varchar(max),
+    @isUniqueTaskByType bit
+AS
+    SET NOCOUNT ON
+
+    SET XACT_ABORT ON
+    BEGIN TRANSACTION
+
+    DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()
+    DECLARE @status smallint = 1
+    DECLARE @retryCount smallint = 0
+    DECLARE @isCanceled bit = 0
+
+    -- Check if the task has already been created
+    IF (@isUniqueTaskByType = 1) BEGIN
+        IF EXISTS
+        (
+            SELECT *
+            FROM [dbo].[TaskInfo]
+            WHERE TaskId = @taskId or (TaskTypeId = @taskTypeId and Status <> 3)
+        )
+        BEGIN
+            THROW 50409, 'Task already existed', 1;
+        END
+    END
+    ELSE BEGIN
+        IF EXISTS
+        (
+            SELECT *
+            FROM [dbo].[TaskInfo]
+            WHERE TaskId = @taskId
+        )
+        BEGIN
+            THROW 50409, 'Task already existed', 1;
+        END
+    END
+
+    -- Create new task
+    INSERT INTO [dbo].[TaskInfo]
+        (TaskId, QueueId, Status, TaskTypeId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData)
+    VALUES
+        (@taskId, @queueId, @status, @taskTypeId, @isCanceled, @retryCount, @maxRetryCount, @heartbeatDateTime, @inputData)
+
+    SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData
+    FROM [dbo].[TaskInfo]
+    where TaskId = @taskId
+
+    COMMIT TRANSACTION
+GO
+
+/*************************************************************
+    Stored procedures for get task payload
+**************************************************************/
+--
+-- STORED PROCEDURE
+-- 
GetTaskDetails +-- +-- DESCRIPTION +-- Get task payload. +-- +-- PARAMETERS +-- @taskId +-- * The ID of the task record +-- +CREATE PROCEDURE [dbo].[GetTaskDetails] + @taskId varchar(64) +AS + SET NOCOUNT ON + + SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData, TaskContext, Result + FROM [dbo].[TaskInfo] + where TaskId = @taskId +GO + + +/************************************************************* + Stored procedures for update task context +**************************************************************/ +-- +-- STORED PROCEDURE +-- UpdateTaskContext +-- +-- DESCRIPTION +-- Update task context. +-- +-- PARAMETERS +-- @taskId +-- * The ID of the task record +-- @taskContext +-- * The context of the task +-- @runId +-- * Current runId for this exuction of the task +-- +CREATE PROCEDURE [dbo].[UpdateTaskContext] + @taskId varchar(64), + @taskContext varchar(max), + @runId varchar(50) +AS + SET NOCOUNT ON + + SET XACT_ABORT ON + BEGIN TRANSACTION + + -- Can only update task context with same runid + IF NOT EXISTS + ( + SELECT * + FROM [dbo].[TaskInfo] + WHERE TaskId = @taskId and RunId = @runId + ) BEGIN + THROW 50404, 'Task not exist or runid not match', 1; + END + + -- We will timestamp the jobs when we update them to track stale jobs. + DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME() + + UPDATE dbo.TaskInfo + SET HeartbeatDateTime = @heartbeatDateTime, TaskContext = @taskContext + WHERE TaskId = @taskId + + SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData, TaskContext, Result + FROM [dbo].[TaskInfo] + where TaskId = @taskId + + COMMIT TRANSACTION +GO + + +/************************************************************* + Stored procedures for keepalive task +**************************************************************/ +-- +-- STORED PROCEDURE +-- TaskKeepAlive +-- +-- DESCRIPTION +-- Task keep-alive. 
+--
+-- PARAMETERS
+--     @taskId
+--         * The ID of the task record
+--     @runId
+--         * Current runId for this execution of the task
+--
+CREATE PROCEDURE [dbo].[TaskKeepAlive]
+    @taskId varchar(64),
+    @runId varchar(50)
+AS
+    SET NOCOUNT ON
+
+    SET XACT_ABORT ON
+    BEGIN TRANSACTION
+
+    -- Can only keep alive a task with the same runid
+    IF NOT EXISTS
+    (
+        SELECT *
+        FROM [dbo].[TaskInfo]
+        WHERE TaskId = @taskId and RunId = @runId
+    ) BEGIN
+        THROW 50404, 'Task not exist or runid not match', 1;
+    END
+
+    -- We will timestamp the jobs when we update them to track stale jobs.
+    DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()
+
+    UPDATE dbo.TaskInfo
+    SET HeartbeatDateTime = @heartbeatDateTime
+    WHERE TaskId = @taskId
+
+    SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData, TaskContext, Result
+    FROM [dbo].[TaskInfo]
+    where TaskId = @taskId
+
+    COMMIT TRANSACTION
+GO
+
+/*************************************************************
+    Stored procedures for complete task with result
+**************************************************************/
+--
+-- STORED PROCEDURE
+--     CompleteTask
+--
+-- DESCRIPTION
+--     Complete the task and update task result.
+--
+-- PARAMETERS
+--     @taskId
+--         * The ID of the task record
+--     @taskResult
+--         * The result for the task execution
+--     @runId
+--         * Current runId for this execution of the task
+--
+CREATE PROCEDURE [dbo].[CompleteTask]
+    @taskId varchar(64),
+    @taskResult varchar(max),
+    @runId varchar(50)
+AS
+    SET NOCOUNT ON
+
+    SET XACT_ABORT ON
+    BEGIN TRANSACTION
+
+    -- Can only complete task with same runid
+    IF NOT EXISTS
+    (
+        SELECT *
+        FROM [dbo].[TaskInfo]
+        WHERE TaskId = @taskId and RunId = @runId
+    ) BEGIN
+        THROW 50404, 'Task not exist or runid not match', 1;
+    END
+
+    -- We will timestamp the jobs when we update them to track stale jobs. 
+ DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME() + + UPDATE dbo.TaskInfo + SET Status = 3, HeartbeatDateTime = @heartbeatDateTime, Result = @taskResult + WHERE TaskId = @taskId + + SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData, TaskContext, Result + FROM [dbo].[TaskInfo] + where TaskId = @taskId + + COMMIT TRANSACTION +GO + + +/************************************************************* + Stored procedures for cancel task +**************************************************************/ +-- +-- STORED PROCEDURE +-- CancelTask +-- +-- DESCRIPTION +-- Cancel the task and update task status. +-- +-- PARAMETERS +-- @taskId +-- * The ID of the task record +-- +CREATE PROCEDURE [dbo].[CancelTask] + @taskId varchar(64) +AS + SET NOCOUNT ON + + SET XACT_ABORT ON + BEGIN TRANSACTION + + -- We will timestamp the jobs when we update them to track stale jobs. + DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME() + + IF NOT EXISTS + ( + SELECT * + FROM [dbo].[TaskInfo] + WHERE TaskId = @taskId + ) BEGIN + THROW 50404, 'Task not exist', 1; + END + + UPDATE dbo.TaskInfo + SET IsCanceled = 1, HeartbeatDateTime = @heartbeatDateTime + WHERE TaskId = @taskId + + SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData, TaskContext, Result + FROM [dbo].[TaskInfo] + where TaskId = @taskId + + COMMIT TRANSACTION +GO + + +/************************************************************* + Stored procedures for reset task +**************************************************************/ +-- +-- STORED PROCEDURE +-- ResetTask +-- +-- DESCRIPTION +-- Reset the task status. 
+--
+-- PARAMETERS
+--     @taskId
+--         * The ID of the task record
+--     @runId
+--         * Current runId for this execution of the task
+--
+CREATE PROCEDURE [dbo].[ResetTask]
+    @taskId varchar(64),
+    @runId varchar(50),
+    @result varchar(max)
+AS
+    SET NOCOUNT ON
+
+    SET XACT_ABORT ON
+    BEGIN TRANSACTION
+
+    -- Can only reset task with same runid
+    DECLARE @retryCount smallint
+    DECLARE @status smallint
+    DECLARE @maxRetryCount smallint
+
+    SELECT @retryCount = RetryCount, @status = Status, @maxRetryCount = MaxRetryCount
+    FROM [dbo].[TaskInfo]
+    WHERE TaskId = @taskId and RunId = @runId
+
+    -- We will timestamp the jobs when we update them to track stale jobs.
+    IF (@retryCount IS NULL) BEGIN
+        THROW 50404, 'Task not exist or runid not match', 1;
+    END
+
+    DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME()
+
+    IF (@retryCount >= @maxRetryCount) BEGIN
+        UPDATE dbo.TaskInfo
+        SET Status = 3, HeartbeatDateTime = @heartbeatDateTime, Result = @result
+        WHERE TaskId = @taskId
+    END
+    Else IF (@status <> 3) BEGIN
+        UPDATE dbo.TaskInfo
+        SET Status = 1, HeartbeatDateTime = @heartbeatDateTime, Result = @result, RetryCount = @retryCount + 1
+        WHERE TaskId = @taskId
+    END
+
+    SELECT TaskId, QueueId, Status, TaskTypeId, RunId, IsCanceled, RetryCount, MaxRetryCount, HeartbeatDateTime, InputData, TaskContext, Result
+    FROM [dbo].[TaskInfo]
+    where TaskId = @taskId
+
+    COMMIT TRANSACTION
+GO
+
+/*************************************************************
+    Stored procedures for get next available task
+**************************************************************/
+--
+-- STORED PROCEDURE
+--     GetNextTask_2
+--
+-- DESCRIPTION
+--     Get next available tasks
+--
+-- PARAMETERS
+--     @queueId
+--         * The ID of the queue to pull tasks from
+--     @count
+--         * Batch count for tasks list
+--     @taskHeartbeatTimeoutThresholdInSeconds
+--         * Timeout threshold in seconds for heartbeat keep-alive
+CREATE PROCEDURE [dbo].[GetNextTask_2]
+    @queueId varchar(64),
+    @count smallint,
+    @taskHeartbeatTimeoutThresholdInSeconds 
int = 600 +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + BEGIN TRANSACTION + + -- We will consider a job to be stale if its timestamp is smaller than or equal to this. + DECLARE @expirationDateTime dateTime2(7) + SELECT @expirationDateTime = DATEADD(second, -@taskHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME()) + + DECLARE @availableJobs TABLE ( + TaskId varchar(64), + QueueId varchar(64), + Status smallint, + TaskTypeId smallint, + IsCanceled bit, + RetryCount smallint, + HeartbeatDateTime datetime2, + InputData varchar(max), + TaskContext varchar(max), + Result varchar(max) + ) + + INSERT INTO @availableJobs + SELECT TOP(@count) TaskId, QueueId, Status, TaskTypeId, IsCanceled, RetryCount, HeartbeatDateTime, InputData, TaskContext, Result + FROM dbo.TaskInfo + WHERE (QueueId = @queueId AND (Status = 1 OR (Status = 2 AND HeartbeatDateTime <= @expirationDateTime))) + ORDER BY HeartbeatDateTime + + DECLARE @heartbeatDateTime datetime2(7) = SYSUTCDATETIME() + + UPDATE dbo.TaskInfo + SET Status = 2, HeartbeatDateTime = @heartbeatDateTime, RunId = CAST(NEWID() AS NVARCHAR(50)) + FROM dbo.TaskInfo task INNER JOIN @availableJobs availableJob ON task.TaskId = availableJob.TaskId + + Select task.TaskId, task.QueueId, task.Status, task.TaskTypeId, task.RunId, task.IsCanceled, task.RetryCount, task.MaxRetryCount, task.HeartbeatDateTime, task.InputData, task.TaskContext, task.Result + from dbo.TaskInfo task INNER JOIN @availableJobs availableJob ON task.TaskId = availableJob.TaskId + + COMMIT TRANSACTION +GO + +/************************************************************* + Resource change capture feature +**************************************************************/ + +/************************************************************* + Resource change data table +**************************************************************/ + +CREATE TABLE dbo.ResourceChangeData +( + Id bigint IDENTITY(1,1) NOT NULL, + Timestamp datetime2(7) 
NOT NULL CONSTRAINT DF_ResourceChangeData_Timestamp DEFAULT sysutcdatetime(), + ResourceId varchar(64) NOT NULL, + ResourceTypeId smallint NOT NULL, + ResourceVersion int NOT NULL, + ResourceChangeTypeId tinyint NOT NULL, + CONSTRAINT PK_ResourceChangeData PRIMARY KEY CLUSTERED (Id) +) +ON [PRIMARY] +GO + +/************************************************************* + Resource change type table +**************************************************************/ +CREATE TABLE dbo.ResourceChangeType +( + ResourceChangeTypeId tinyint NOT NULL, + Name nvarchar(50) NOT NULL, + CONSTRAINT PK_ResourceChangeType PRIMARY KEY CLUSTERED (ResourceChangeTypeId), + CONSTRAINT UQ_ResourceChangeType_Name UNIQUE NONCLUSTERED (Name) +) +ON [PRIMARY] +GO + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) VALUES (0, N'Creation') +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) VALUES (1, N'Update') +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) VALUES (2, N'Deletion') +GO + + +/************************************************************* + Stored procedures for capturing and fetching resource changes +**************************************************************/ +-- +-- STORED PROCEDURE +-- CaptureResourceChanges +-- +-- DESCRIPTION +-- Inserts resource change data +-- +-- PARAMETERS +-- @isDeleted +-- * Whether this resource marks the resource as deleted. +-- @version +-- * The version of the resource being written +-- @resourceId +-- * The resource ID +-- @resourceTypeId +-- * The ID of the resource type +-- +-- RETURN VALUE +-- It does not return a value. +-- +CREATE PROCEDURE dbo.CaptureResourceChanges + @isDeleted bit, + @version int, + @resourceId varchar(64), + @resourceTypeId smallint +AS +BEGIN + -- The CaptureResourceChanges procedure is intended to be called from + -- the UpsertResource_4 procedure, so it does not begin a new transaction here. 
+    DECLARE @changeType SMALLINT
+    IF (@isDeleted = 1) BEGIN
+        SET @changeType = 2 /* DELETION */
+    END
+    ELSE BEGIN
+        IF (@version = 1) BEGIN
+            SET @changeType = 0 /* CREATION */
+        END
+        ELSE BEGIN
+            SET @changeType = 1 /* UPDATE */
+        END
+    END
+
+    INSERT INTO dbo.ResourceChangeData
+        (ResourceId, ResourceTypeId, ResourceVersion, ResourceChangeTypeId)
+    VALUES
+        (@resourceId, @resourceTypeId, @version, @changeType)
+END
+GO
+
+--
+-- STORED PROCEDURE
+--     FetchResourceChanges
+--
+-- DESCRIPTION
+--     Returns up to @pageSize resource change records, ordered by Id, starting from @startId. The start id is inclusive.
+--
+-- PARAMETERS
+--     @startId
+--         * The start id of resource change records to fetch.
+--     @pageSize
+--         * The page size for fetching resource change records.
+--
+-- RETURN VALUE
+--     Resource change data rows.
+--
+CREATE PROCEDURE dbo.FetchResourceChanges
+    @startId bigint,
+    @pageSize smallint
+AS
+BEGIN
+
+    SET NOCOUNT ON;
+
+    -- Given the fact that Read Committed Snapshot isolation level is enabled on the FHIR database,
+    -- using the Repeatable Read isolation level table hint to avoid skipping resource changes
+    -- due to interleaved transactions on the resource change data table.
+    -- In Repeatable Read, the select query execution will be blocked until other open transactions are completed
+    -- for rows that match the search condition of the select statement.
+    -- A write transaction (update/delete) on the rows that match
+    -- the search condition of the select statement will wait until the read transaction is completed.
+    -- But, other transactions can insert new rows. 
+ SELECT TOP(@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM dbo.ResourceChangeData WITH (REPEATABLEREAD) + WHERE Id >= @startId ORDER BY Id ASC +END +GO + +/************************************************************* + Event Agent checkpoint feature +**************************************************************/ + +/************************************************************* + Event Agent checkpoint table +**************************************************************/ + +IF NOT EXISTS (SELECT 1 FROM sys.tables WHERE name = 'EventAgentCheckpoint') +BEGIN + CREATE TABLE dbo.EventAgentCheckpoint + ( + CheckpointId varchar(64) NOT NULL, + LastProcessedDateTime datetimeoffset(7), + LastProcessedIdentifier varchar(64), + UpdatedOn datetime2(7) NOT NULL DEFAULT sysutcdatetime(), + CONSTRAINT PK_EventAgentCheckpoint PRIMARY KEY CLUSTERED (CheckpointId) + ) + ON [PRIMARY] +END +GO + +/************************************************************* + Stored procedures for getting and setting checkpoints +**************************************************************/ +-- +-- STORED PROCEDURE +-- UpdateEventAgentCheckpoint +-- +-- DESCRIPTION +-- Sets a checkpoint for an Event Agent +-- +-- PARAMETERS +-- @CheckpointId +-- * The identifier of the checkpoint. +-- @LastProcessedDateTime +-- * The datetime of last item that was processed. +-- @LastProcessedIdentifier +-- *The identifier of the last item that was processed. +-- +-- RETURN VALUE +-- It does not return a value. 
+--
+CREATE OR ALTER PROCEDURE dbo.UpdateEventAgentCheckpoint
+    @CheckpointId varchar(64),
+    @LastProcessedDateTime datetimeoffset(7) = NULL,
+    @LastProcessedIdentifier varchar(64) = NULL
+AS
+BEGIN
+    IF EXISTS (SELECT * FROM dbo.EventAgentCheckpoint WHERE CheckpointId = @CheckpointId)
+    UPDATE dbo.EventAgentCheckpoint SET CheckpointId = @CheckpointId, LastProcessedDateTime = @LastProcessedDateTime, LastProcessedIdentifier = @LastProcessedIdentifier, UpdatedOn = sysutcdatetime()
+    WHERE CheckpointId = @CheckpointId
+    ELSE
+    INSERT INTO dbo.EventAgentCheckpoint
+        (CheckpointId, LastProcessedDateTime, LastProcessedIdentifier, UpdatedOn)
+    VALUES
+        (@CheckpointId, @LastProcessedDateTime, @LastProcessedIdentifier, sysutcdatetime())
+END
+GO
+
+--
+-- STORED PROCEDURE
+--     FetchEventAgentCheckpoint
+--
+-- DESCRIPTION
+--     Gets a checkpoint for an Event Agent
+--
+-- PARAMETERS
+--     @CheckpointId
+--         * The identifier of the checkpoint.
+--
+-- RETURN VALUE
+--     A checkpoint for the given checkpoint id, if one exists. 
+-- +CREATE OR ALTER PROCEDURE dbo.FetchEventAgentCheckpoint + @CheckpointId varchar(64) +AS +BEGIN + SELECT TOP(1) CheckpointId, LastProcessedDateTime, LastProcessedIdentifier + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId +END +GO + +COMMIT TRANSACTION + +GO + +/************************************************************* + Resource Bulk Import feature +**************************************************************/ +CREATE TYPE dbo.BulkImportResourceType_1 AS TABLE +( + ResourceTypeId smallint NOT NULL, + ResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version int NOT NULL, + IsHistory bit NOT NULL, + ResourceSurrogateId bigint NOT NULL, + IsDeleted bit NOT NULL, + RequestMethod varchar(10) NULL, + RawResource varbinary(max) NOT NULL, + IsRawResourceMetaSet bit NOT NULL DEFAULT 0, + SearchParamHash varchar(64) NULL +) +GO + +/************************************************************* + Stored procedures for batch delete resources +**************************************************************/ +-- +-- STORED PROCEDURE +-- BatchDeleteResources +-- +-- DESCRIPTION +-- Batch delete resources +-- +-- PARAMETERS +-- @resourceTypeId +-- * The resoruce type id +-- @startResourceSurrogateId +-- * The start ResourceSurrogateId +-- @endResourceSurrogateId +-- * The end ResourceSurrogateId +-- @batchSize +-- * Max batch size for delete operation +CREATE PROCEDURE dbo.BatchDeleteResources + @resourceTypeId smallint, + @startResourceSurrogateId bigint, + @endResourceSurrogateId bigint, + @batchSize int +AS + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + BEGIN TRANSACTION + + DELETE Top(@batchSize) FROM dbo.Resource WITH (TABLOCK) + WHERE ResourceTypeId = @resourceTypeId AND ResourceSurrogateId >= @startResourceSurrogateId AND ResourceSurrogateId < @endResourceSurrogateId + + COMMIT TRANSACTION + + return @@rowcount +GO + +/************************************************************* + Stored procedures for 
batch delete ResourceWriteClaims +**************************************************************/ +-- +-- STORED PROCEDURE +-- BatchDeleteResourceWriteClaims +-- +-- DESCRIPTION +-- Batch delete ResourceWriteClaims +-- +-- PARAMETERS +-- @startResourceSurrogateId +-- * The start ResourceSurrogateId +-- @endResourceSurrogateId +-- * The end ResourceSurrogateId +-- @batchSize +-- * Max batch size for delete operation +CREATE PROCEDURE dbo.BatchDeleteResourceWriteClaims + @startResourceSurrogateId bigint, + @endResourceSurrogateId bigint, + @batchSize int +AS + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + BEGIN TRANSACTION + + DELETE Top(@batchSize) FROM dbo.ResourceWriteClaim WITH (TABLOCK) + WHERE ResourceSurrogateId >= @startResourceSurrogateId AND ResourceSurrogateId < @endResourceSurrogateId + + COMMIT TRANSACTION + + return @@rowcount +GO + + +/************************************************************* + Stored procedures for batch delete ResourceParams +**************************************************************/ +-- +-- STORED PROCEDURE +-- BatchDeleteResourceParams +-- +-- DESCRIPTION +-- Batch delete ResourceParams +-- +-- PARAMETERS +-- @tableName +-- * Resource params table name +-- @resourceTypeId +-- * Resource type id +-- @startResourceSurrogateId +-- * The start ResourceSurrogateId +-- @endResourceSurrogateId +-- * The end ResourceSurrogateId +-- @batchSize +-- * Max batch size for delete operation +CREATE PROCEDURE dbo.BatchDeleteResourceParams + @tableName nvarchar(128), + @resourceTypeId smallint, + @startResourceSurrogateId bigint, + @endResourceSurrogateId bigint, + @batchSize int +AS + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + BEGIN TRANSACTION + + DECLARE @Sql NVARCHAR(MAX); + DECLARE @ParmDefinition NVARCHAR(512); + + IF OBJECT_ID(@tableName) IS NOT NULL BEGIN + SET @sql = N'DELETE TOP(@BatchSizeParam) FROM ' + @tableName + N' WITH (TABLOCK) WHERE ResourceTypeId = @ResourceTypeIdParam AND 
ResourceSurrogateId >= @StartResourceSurrogateIdParam AND ResourceSurrogateId < @EndResourceSurrogateIdParam' + SET @parmDefinition = N'@BatchSizeParam int, @ResourceTypeIdParam smallint, @StartResourceSurrogateIdParam bigint, @EndResourceSurrogateIdParam bigint'; + + EXECUTE sp_executesql @sql, @parmDefinition, + @BatchSizeParam = @batchSize, + @ResourceTypeIdParam = @resourceTypeId, + @StartResourceSurrogateIdParam = @startResourceSurrogateId, + @EndResourceSurrogateIdParam = @endResourceSurrogateId + END + + COMMIT TRANSACTION + + return @@rowcount +GO + +/************************************************************* + Stored procedures for disable index +**************************************************************/ +-- +-- STORED PROCEDURE +-- DisableIndex +-- +-- DESCRIPTION +-- Stored procedures for disable index +-- +-- PARAMETERS +-- @tableName +-- * index table name +-- @indexName +-- * index name +CREATE PROCEDURE [dbo].[DisableIndex] + @tableName nvarchar(128), + @indexName nvarchar(128) +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + + DECLARE @IsExecuted INT + SET @IsExecuted = 0 + + BEGIN TRANSACTION + + IF EXISTS + ( + SELECT * + FROM [sys].[indexes] + WHERE name = @indexName + AND object_id = OBJECT_ID(@tableName) + AND is_disabled = 0 + ) + BEGIN + DECLARE @Sql NVARCHAR(MAX); + + SET @Sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + + N' on ' + @tableName + ' Disable' + + EXECUTE sp_executesql @Sql + + SET @IsExecuted = 1 + END + + COMMIT TRANSACTION + + RETURN @IsExecuted +GO + +/************************************************************* + Stored procedures for rebuild index +**************************************************************/ +-- +-- STORED PROCEDURE +-- RebuildIndex +-- +-- DESCRIPTION +-- Stored procedures for rebuild index +-- +-- PARAMETERS +-- @tableName +-- * index table name +-- @indexName +-- * index name +CREATE PROCEDURE [dbo].[RebuildIndex] + @tableName nvarchar(128), + 
@indexName nvarchar(128) +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + SET TRANSACTION ISOLATION LEVEL SERIALIZABLE + + DECLARE @IsExecuted INT + SET @IsExecuted = 0 + + BEGIN TRANSACTION + + IF EXISTS + ( + SELECT * + FROM [sys].[indexes] + WHERE name = @indexName + AND object_id = OBJECT_ID(@tableName) + AND is_disabled = 1 + ) + BEGIN + DECLARE @Sql NVARCHAR(MAX); + + SET @Sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + + N' on ' + @tableName + ' Rebuild' + + EXECUTE sp_executesql @Sql + + SET @IsExecuted = 1 + END + + COMMIT TRANSACTION + + RETURN @IsExecuted +GO + +/************************************************************* + Stored procedures for bulk merge resources +**************************************************************/ +-- +-- STORED PROCEDURE +-- BulkMergeResource +-- +-- DESCRIPTION +-- Stored procedures for bulk merge resource +-- +-- PARAMETERS +-- @resources +-- * input resources +CREATE PROCEDURE dbo.BulkMergeResource + @resources dbo.BulkImportResourceType_1 READONLY +AS + SET NOCOUNT ON + SET XACT_ABORT ON + + BEGIN TRANSACTION + + MERGE INTO [dbo].[Resource] WITH (ROWLOCK, INDEX(IX_Resource_ResourceTypeId_ResourceId_Version)) AS target + USING @resources AS source + ON source.[ResourceTypeId] = target.[ResourceTypeId] + AND source.[ResourceId] = target.[ResourceId] + AND source.[Version] = target.[Version] + WHEN NOT MATCHED BY target THEN + INSERT ([ResourceTypeId] + , [ResourceId] + , [Version] + , [IsHistory] + , [ResourceSurrogateId] + , [IsDeleted] + , [RequestMethod] + , [RawResource] + , [IsRawResourceMetaSet] + , [SearchParamHash]) + VALUES ([ResourceTypeId] + , [ResourceId] + , [Version] + , [IsHistory] + , [ResourceSurrogateId] + , [IsDeleted] + , [RequestMethod] + , [RawResource] + , [IsRawResourceMetaSet] + , [SearchParamHash]) + OUTPUT Inserted.[ResourceSurrogateId]; + + COMMIT TRANSACTION +GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs 
b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs index 7a317b73ca..599be6d070 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs @@ -26,5 +26,6 @@ public enum SchemaVersion V14 = 14, V15 = 15, V16 = 16, + V17 = 17, } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs index 954149590b..b6dbec6245 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs @@ -8,7 +8,7 @@ namespace Microsoft.Health.Fhir.SqlServer.Features.Schema public static class SchemaVersionConstants { public const int Min = (int)SchemaVersion.V4; - public const int Max = (int)SchemaVersion.V16; + public const int Max = (int)SchemaVersion.V17; public const int SearchParameterStatusSchemaVersion = (int)SchemaVersion.V6; public const int SupportForReferencesWithMissingTypeVersion = (int)SchemaVersion.V7; public const int SearchParameterHashSchemaVersion = (int)SchemaVersion.V8; diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs index fd2740bab3..a33bbe61d3 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs @@ -19,6 +19,7 @@ using Microsoft.Extensions.Logging; using Microsoft.Health.Core.Features.Context; using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Persistence; using Microsoft.Health.Fhir.Core.Features.Search; using Microsoft.Health.Fhir.Core.Features.Search.Expressions; @@ -53,6 +54,7 @@ internal 
class SqlServerSearchService : SearchService private readonly SqlConnectionWrapperFactory _sqlConnectionWrapperFactory; private const string SortValueColumnName = "SortValue"; private readonly SchemaInformation _schemaInformation; + private readonly ICompressedRawResourceConverter _compressedRawResourceConverter; private readonly RequestContextAccessor _requestContextAccessor; private const int _resourceTableColumnCount = 10; private readonly SearchParameterInfo _fakeLastUpdate = new SearchParameterInfo(SearchParameterNames.LastUpdated, SearchParameterNames.LastUpdated); @@ -68,6 +70,7 @@ public SqlServerSearchService( SqlConnectionWrapperFactory sqlConnectionWrapperFactory, SchemaInformation schemaInformation, RequestContextAccessor requestContextAccessor, + ICompressedRawResourceConverter compressedRawResourceConverter, ILogger logger) : base(searchOptionsFactory, fhirDataStore) { @@ -89,6 +92,7 @@ public SqlServerSearchService( _schemaInformation = schemaInformation; _requestContextAccessor = requestContextAccessor; + _compressedRawResourceConverter = compressedRawResourceConverter; } public override async Task SearchAsync(SearchOptions searchOptions, CancellationToken cancellationToken) @@ -291,7 +295,7 @@ private async Task SearchImpl(SearchOptions searchOptions, SqlSear string rawResource; using (rawResourceStream) { - rawResource = await CompressedRawResourceConverter.ReadCompressedRawResource(rawResourceStream); + rawResource = await _compressedRawResourceConverter.ReadCompressedRawResource(rawResourceStream); } // See if this resource is a continuation token candidate and increase the count diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/CompressedRawResourceConverter.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/CompressedRawResourceConverter.cs index 74dc4192d5..3d0bed3572 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/CompressedRawResourceConverter.cs +++ 
b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/CompressedRawResourceConverter.cs @@ -7,18 +7,23 @@ using System.IO.Compression; using System.Text; using System.Threading.Tasks; +using Microsoft.Health.Fhir.Core.Features.Operations; namespace Microsoft.Health.Fhir.SqlServer.Features.Storage { /// /// Handles converting raw resource strings to compressed streams for storage in the database and vice-versa. /// - internal class CompressedRawResourceConverter + internal class CompressedRawResourceConverter : ICompressedRawResourceConverter { internal static readonly Encoding LegacyResourceEncoding = new UnicodeEncoding(bigEndian: false, byteOrderMark: false); internal static readonly Encoding ResourceEncoding = new UTF8Encoding(encoderShouldEmitUTF8Identifier: true); - public static async Task ReadCompressedRawResource(Stream compressedResourceStream) + public CompressedRawResourceConverter() + { + } + + public async Task ReadCompressedRawResource(Stream compressedResourceStream) { await using var gzipStream = new GZipStream(compressedResourceStream, CompressionMode.Decompress, leaveOpen: true); @@ -29,7 +34,7 @@ public static async Task ReadCompressedRawResource(Stream compressedReso return await reader.ReadToEndAsync(); } - public static void WriteCompressedRawResource(Stream outputStream, string rawResource) + public void WriteCompressedRawResource(Stream outputStream, string rawResource) { using var gzipStream = new GZipStream(outputStream, CompressionMode.Compress, leaveOpen: true); using var writer = new StreamWriter(gzipStream, ResourceEncoding); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs index 38791cbacb..58ab070cd0 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs @@ -18,6 +18,7 @@ using 
Microsoft.Health.Fhir.Core.Configs; using Microsoft.Health.Fhir.Core.Exceptions; using Microsoft.Health.Fhir.Core.Features.Conformance; +using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Persistence; using Microsoft.Health.Fhir.Core.Models; using Microsoft.Health.Fhir.SqlServer.Features.Schema; @@ -47,6 +48,7 @@ internal class SqlServerFhirDataStore : IFhirDataStore, IProvideCapability private readonly RecyclableMemoryStreamManager _memoryStreamManager; private readonly CoreFeatureConfiguration _coreFeatures; private readonly SqlConnectionWrapperFactory _sqlConnectionWrapperFactory; + private readonly ICompressedRawResourceConverter _compressedRawResourceConverter; private readonly ILogger _logger; private readonly SchemaInformation _schemaInformation; @@ -61,6 +63,7 @@ public SqlServerFhirDataStore( VLatest.BulkReindexResourcesTvpGenerator> bulkReindexResourcesTvpGeneratorVLatest, IOptions coreFeatures, SqlConnectionWrapperFactory sqlConnectionWrapperFactory, + ICompressedRawResourceConverter compressedRawResourceConverter, ILogger logger, SchemaInformation schemaInformation) { @@ -74,6 +77,7 @@ public SqlServerFhirDataStore( EnsureArg.IsNotNull(bulkReindexResourcesTvpGeneratorVLatest, nameof(bulkReindexResourcesTvpGeneratorVLatest)); EnsureArg.IsNotNull(coreFeatures, nameof(coreFeatures)); EnsureArg.IsNotNull(sqlConnectionWrapperFactory, nameof(sqlConnectionWrapperFactory)); + EnsureArg.IsNotNull(compressedRawResourceConverter, nameof(compressedRawResourceConverter)); EnsureArg.IsNotNull(logger, nameof(logger)); EnsureArg.IsNotNull(schemaInformation, nameof(schemaInformation)); @@ -87,6 +91,7 @@ public SqlServerFhirDataStore( _bulkReindexResourcesTvpGeneratorVLatest = bulkReindexResourcesTvpGeneratorVLatest; _coreFeatures = coreFeatures.Value; _sqlConnectionWrapperFactory = sqlConnectionWrapperFactory; + _compressedRawResourceConverter = compressedRawResourceConverter; _logger = logger; _schemaInformation = 
schemaInformation; @@ -108,7 +113,7 @@ public async Task UpsertAsync(ResourceWrapper resource, WeakETag using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) using (var stream = new RecyclableMemoryStream(_memoryStreamManager)) { - CompressedRawResourceConverter.WriteCompressedRawResource(stream, resource.RawResource.Data); + _compressedRawResourceConverter.WriteCompressedRawResource(stream, resource.RawResource.Data); stream.Seek(0, 0); @@ -273,7 +278,7 @@ public async Task GetAsync(ResourceKey key, CancellationToken c string rawResource; using (rawResourceStream) { - rawResource = await CompressedRawResourceConverter.ReadCompressedRawResource(rawResourceStream); + rawResource = await _compressedRawResourceConverter.ReadCompressedRawResource(rawResourceStream); } var isRawResourceMetaSet = sqlDataReader.Read(resourceTable.IsRawResourceMetaSet, 5); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskConsumer.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskConsumer.cs index 04ed6593e7..23e96bc022 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskConsumer.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskConsumer.cs @@ -105,14 +105,15 @@ public async Task> GetNextMessagesAsync(short coun using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) { - VLatest.GetNextTask.PopulateCommand(sqlCommandWrapper, _taskHostingConfiguration.QueueId, count, taskHeartbeatTimeoutThresholdInSeconds); + string queueId = _taskHostingConfiguration.QueueId; + VLatest.GetNextTask.PopulateCommand(sqlCommandWrapper, queueId, count, taskHeartbeatTimeoutThresholdInSeconds); SqlDataReader sqlDataReader = await sqlCommandWrapper.ExecuteReaderAsync(cancellationToken); var taskInfoTable = 
VLatest.TaskInfo; while (sqlDataReader.Read()) { string id = sqlDataReader.Read(taskInfoTable.TaskId, 0); - string queueId = sqlDataReader.Read(taskInfoTable.QueueId, 1); + _ = sqlDataReader.Read(taskInfoTable.QueueId, 1); short status = sqlDataReader.Read(taskInfoTable.Status, 2); short taskTypeId = sqlDataReader.Read(taskInfoTable.TaskTypeId, 3); string taskRunId = sqlDataReader.Read(taskInfoTable.RunId, 4); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdater.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdater.cs new file mode 100644 index 0000000000..35f0a85e81 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdater.cs @@ -0,0 +1,67 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Threading; +using System.Threading.Tasks; +using EnsureThat; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Client; +using Microsoft.Health.SqlServer.Features.Storage; +using Microsoft.Health.TaskManagement; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Storage +{ + public class SqlServerTaskContextUpdater : IContextUpdater + { + private string _taskId; + private string _runId; + + private SqlConnectionWrapperFactory _sqlConnectionWrapperFactory; + private ILogger _logger; + + public SqlServerTaskContextUpdater( + string taskId, + string runId, + SqlConnectionWrapperFactory sqlConnectionWrapperFactory, + ILogger logger) + { + EnsureArg.IsNotNullOrEmpty(taskId, nameof(taskId)); + EnsureArg.IsNotNullOrEmpty(runId, nameof(runId)); + EnsureArg.IsNotNull(sqlConnectionWrapperFactory, nameof(sqlConnectionWrapperFactory)); + EnsureArg.IsNotNull(logger, nameof(logger)); + + _taskId = taskId; + _runId = runId; + _sqlConnectionWrapperFactory = sqlConnectionWrapperFactory; + _logger = logger; + } + + public async Task UpdateContextAsync(string context, CancellationToken cancellationToken) + { + using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + try + { + VLatest.UpdateTaskContext.PopulateCommand(sqlCommandWrapper, _taskId, context, _runId); + await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken); + } + catch (SqlException sqlEx) + { + _logger.LogInformation(sqlEx, "Failed to update context."); + + if (sqlEx.Number == SqlErrorCodes.NotFound) + { + throw new TaskNotExistException(sqlEx.Message); + } + + throw; + } + } + } + } +} diff 
--git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdaterFactory.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdaterFactory.cs new file mode 100644 index 0000000000..051e10e5ce --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskContextUpdaterFactory.cs @@ -0,0 +1,37 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using EnsureThat; +using Microsoft.Extensions.Logging; +using Microsoft.Health.SqlServer.Features.Client; +using Microsoft.Health.TaskManagement; + +namespace Microsoft.Health.Fhir.SqlServer.Features.Storage +{ + public class SqlServerTaskContextUpdaterFactory : IContextUpdaterFactory + { + private SqlConnectionWrapperFactory _sqlConnectionWrapperFactory; + private ILoggerFactory _loggerFactory; + + public SqlServerTaskContextUpdaterFactory( + SqlConnectionWrapperFactory sqlConnectionWrapperFactory, + ILoggerFactory loggerFactory) + { + EnsureArg.IsNotNull(sqlConnectionWrapperFactory, nameof(sqlConnectionWrapperFactory)); + EnsureArg.IsNotNull(loggerFactory, nameof(loggerFactory)); + + _sqlConnectionWrapperFactory = sqlConnectionWrapperFactory; + _loggerFactory = loggerFactory; + } + + public IContextUpdater CreateContextUpdater(string taskId, string runId) + { + EnsureArg.IsNotEmptyOrWhiteSpace(taskId, nameof(taskId)); + EnsureArg.IsNotEmptyOrWhiteSpace(runId, nameof(runId)); + + return new SqlServerTaskContextUpdater(taskId, runId, _sqlConnectionWrapperFactory, _loggerFactory.CreateLogger()); + } + } +} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskManager.cs 
b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskManager.cs index 70d2d8c8d0..616fac7755 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskManager.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerTaskManager.cs @@ -33,14 +33,14 @@ public SqlServerTaskManager( _logger = logger; } - public async Task CreateTaskAsync(TaskInfo task, CancellationToken cancellationToken) + public async Task CreateTaskAsync(TaskInfo task, bool isUniqueTaskByType, CancellationToken cancellationToken) { using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) { try { - VLatest.CreateTask.PopulateCommand(sqlCommandWrapper, task.TaskId, task.QueueId, task.TaskTypeId, task.MaxRetryCount, task.InputData); + VLatest.CreateTask.PopulateCommand(sqlCommandWrapper, task.TaskId, task.QueueId, task.TaskTypeId, task.MaxRetryCount, task.InputData, isUniqueTaskByType); SqlDataReader sqlDataReader = await sqlCommandWrapper.ExecuteReaderAsync(cancellationToken); if (!sqlDataReader.Read()) @@ -138,44 +138,56 @@ public async Task CancelTaskAsync(string taskId, CancellationToken can using (SqlConnectionWrapper sqlConnectionWrapper = await _sqlConnectionWrapperFactory.ObtainSqlConnectionWrapperAsync(cancellationToken, true)) using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) { - VLatest.CancelTask.PopulateCommand(sqlCommandWrapper, taskId); - SqlDataReader sqlDataReader = await sqlCommandWrapper.ExecuteReaderAsync(cancellationToken); - - if (!sqlDataReader.Read()) + try { - return null; - } + VLatest.CancelTask.PopulateCommand(sqlCommandWrapper, taskId); + SqlDataReader sqlDataReader = await sqlCommandWrapper.ExecuteReaderAsync(cancellationToken); - var taskInfoTable = VLatest.TaskInfo; + if (!sqlDataReader.Read()) + { + return null; + } 
- string id = sqlDataReader.Read(taskInfoTable.TaskId, 0); - string queueId = sqlDataReader.Read(taskInfoTable.QueueId, 1); - short status = sqlDataReader.Read(taskInfoTable.Status, 2); - short taskTypeId = sqlDataReader.Read(taskInfoTable.TaskTypeId, 3); - string taskRunId = sqlDataReader.Read(taskInfoTable.RunId, 4); - bool isCanceled = sqlDataReader.Read(taskInfoTable.IsCanceled, 5); - short retryCount = sqlDataReader.Read(taskInfoTable.RetryCount, 6); - short maxRetryCount = sqlDataReader.Read(taskInfoTable.MaxRetryCount, 7); - DateTime? heartbeatDateTime = sqlDataReader.Read(taskInfoTable.HeartbeatDateTime, 8); - string inputData = sqlDataReader.Read(taskInfoTable.InputData, 9); - string taskContext = sqlDataReader.Read(taskInfoTable.TaskContext, 10); - string result = sqlDataReader.Read(taskInfoTable.Result, 11); + var taskInfoTable = VLatest.TaskInfo; - return new TaskInfo() + string id = sqlDataReader.Read(taskInfoTable.TaskId, 0); + string queueId = sqlDataReader.Read(taskInfoTable.QueueId, 1); + short status = sqlDataReader.Read(taskInfoTable.Status, 2); + short taskTypeId = sqlDataReader.Read(taskInfoTable.TaskTypeId, 3); + string taskRunId = sqlDataReader.Read(taskInfoTable.RunId, 4); + bool isCanceled = sqlDataReader.Read(taskInfoTable.IsCanceled, 5); + short retryCount = sqlDataReader.Read(taskInfoTable.RetryCount, 6); + short maxRetryCount = sqlDataReader.Read(taskInfoTable.MaxRetryCount, 7); + DateTime? 
heartbeatDateTime = sqlDataReader.Read(taskInfoTable.HeartbeatDateTime, 8); + string inputData = sqlDataReader.Read(taskInfoTable.InputData, 9); + string taskContext = sqlDataReader.Read(taskInfoTable.TaskContext, 10); + string result = sqlDataReader.Read(taskInfoTable.Result, 11); + + return new TaskInfo() + { + TaskId = id, + QueueId = queueId, + Status = (TaskStatus)status, + TaskTypeId = taskTypeId, + RunId = taskRunId, + IsCanceled = isCanceled, + RetryCount = retryCount, + MaxRetryCount = maxRetryCount, + HeartbeatDateTime = heartbeatDateTime, + InputData = inputData, + Context = taskContext, + Result = result, + }; + } + catch (SqlException sqlEx) { - TaskId = id, - QueueId = queueId, - Status = (TaskStatus)status, - TaskTypeId = taskTypeId, - RunId = taskRunId, - IsCanceled = isCanceled, - RetryCount = retryCount, - MaxRetryCount = maxRetryCount, - HeartbeatDateTime = heartbeatDateTime, - InputData = inputData, - Context = taskContext, - Result = result, - }; + if (sqlEx.Number == SqlErrorCodes.NotFound) + { + throw new TaskNotExistException(sqlEx.Message); + } + + throw; + } } } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs index f8a7281d9b..3f696605bc 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs @@ -21,6 +21,9 @@ public BulkDateTimeSearchParameterV1RowGenerator(SqlServerFhirModel model, Searc internal override bool TryGenerateRow(int offset, short searchParamId, DateTimeSearchValue searchValue, out BulkDateTimeSearchParamTableTypeV1Row row) { + // For composite generator contains BulkDateTimeSearchParameterV1RowGenerator, it is possible to call 
TryGenerateRow before GenerateRow on this Generator. + EnsureInitialized(); + if (searchParamId == _lastUpdatedSearchParamId) { // this value is already stored on the Resource table. diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs index cfbf1983de..0c6f5d0223 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs @@ -72,7 +72,7 @@ public virtual IEnumerable GenerateRows(IReadOnlyList inp } } - private void EnsureInitialized() + protected void EnsureInitialized() { if (Volatile.Read(ref _isInitialized)) { diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs index a8a3708a4d..4993d7ef6f 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs @@ -20,6 +20,9 @@ public BulkTokenSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchPa internal override bool TryGenerateRow(int offset, short searchParamId, TokenSearchValue searchValue, out BulkTokenSearchParamTableTypeV1Row row) { + // For composite generator contains BulkTokenSearchParameterV1RowGenerator, it is possible to call TryGenerateRow before GenerateRow on this Generator. + EnsureInitialized(); + // don't store if the code is empty or if this is the Resource _id parameter. The id is already maintained on the Resource table. 
if (string.IsNullOrWhiteSpace(searchValue.Code) || searchParamId == _resourceIdSearchParamId) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj index e647631c33..f8364db0d0 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj +++ b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj @@ -45,7 +45,9 @@ - + + + true true Latest diff --git a/src/Microsoft.Health.Fhir.SqlServer/Registration/FhirServerBuilderSqlServerRegistrationExtensions.cs b/src/Microsoft.Health.Fhir.SqlServer/Registration/FhirServerBuilderSqlServerRegistrationExtensions.cs index 03bb25774d..479f0f59f3 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Registration/FhirServerBuilderSqlServerRegistrationExtensions.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Registration/FhirServerBuilderSqlServerRegistrationExtensions.cs @@ -11,6 +11,8 @@ using Microsoft.Health.Fhir.Core.Features.Search.Registry; using Microsoft.Health.Fhir.Core.Registration; using Microsoft.Health.Fhir.SqlServer.Features.Operations; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator; using Microsoft.Health.Fhir.SqlServer.Features.Operations.Reindex; using Microsoft.Health.Fhir.SqlServer.Features.Schema; using Microsoft.Health.Fhir.SqlServer.Features.Search; @@ -113,6 +115,115 @@ public static IFhirServerBuilder AddSqlServer(this IFhirServerBuilder fhirServer .Singleton() .AsImplementedInterfaces(); + services.Add() + .Scoped() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Scoped() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Scoped() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Scoped() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + 
services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf(); + + services.Add() + .Transient() + .AsSelf() + .AsImplementedInterfaces(); + services.Add() .Transient() .AsImplementedInterfaces(); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Resources.Designer.cs b/src/Microsoft.Health.Fhir.SqlServer/Resources.Designer.cs index 6ad3973fae..78961fcdcb 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Resources.Designer.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Resources.Designer.cs @@ -69,6 +69,15 @@ internal static string CyclicIncludeIterateNotSupported { } } + /// + /// Looks up a localized string similar to Failed to import resource with duplicated resource id {0}, line: {1}. + /// + internal static string FailedToImportForDuplicatedResource { + get { + return ResourceManager.GetString("FailedToImportForDuplicatedResource", resourceCulture); + } + } + /// /// Looks up a localized string similar to An error occurred when fetching resource changes from SQL database.. 
/// diff --git a/src/Microsoft.Health.Fhir.SqlServer/Resources.resx b/src/Microsoft.Health.Fhir.SqlServer/Resources.resx index 60581a87fa..ec7bd3b0e5 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Resources.resx +++ b/src/Microsoft.Health.Fhir.SqlServer/Resources.resx @@ -145,6 +145,10 @@ Both _type and _lastUpdated must have the same sort direction (_sort=_type,_lastUpdated or _sort=-_type,-_lastUpdated) + + Failed to import resource with duplicated resource id {0}, line: {1} + {0} is the resource id. {1} is the line number in file. + An error occurred when fetching resource changes from SQL database. diff --git a/src/Microsoft.Health.Fhir.Stu3.Client/Microsoft.Health.Fhir.Stu3.Client.csproj b/src/Microsoft.Health.Fhir.Stu3.Client/Microsoft.Health.Fhir.Stu3.Client.csproj index 67b21a5f73..8745920795 100644 --- a/src/Microsoft.Health.Fhir.Stu3.Client/Microsoft.Health.Fhir.Stu3.Client.csproj +++ b/src/Microsoft.Health.Fhir.Stu3.Client/Microsoft.Health.Fhir.Stu3.Client.csproj @@ -12,5 +12,8 @@ + + + diff --git a/src/Microsoft.Health.Fhir.Stu3.Web/Properties/launchSettings.json b/src/Microsoft.Health.Fhir.Stu3.Web/Properties/launchSettings.json index a1bd0ffb24..07944207ce 100644 --- a/src/Microsoft.Health.Fhir.Stu3.Web/Properties/launchSettings.json +++ b/src/Microsoft.Health.Fhir.Stu3.Web/Properties/launchSettings.json @@ -27,6 +27,10 @@ "SqlServer:SchemaOptions:AutomaticUpdatesEnabled": "true", "TestAuthEnvironment:FilePath": "..//..//testauthenvironment.json", "DataStore": "SqlServer", + "TaskHosting:Enabled": "true", + "TaskHosting:MaxRunningTaskCount": "2", + "FhirServer:Operations:IntegrationDataStore:StorageAccountConnection": "UseDevelopmentStorage=true", + "FhirServer:Operations:Import:Enabled": "true", "ASPNETCORE_ENVIRONMENT": "development" }, "applicationUrl": "https://localhost:44348/" diff --git a/src/Microsoft.Health.Fhir.Tests.Common/Categories.cs b/src/Microsoft.Health.Fhir.Tests.Common/Categories.cs index ead04af514..7a12515a50 100644 --- 
a/src/Microsoft.Health.Fhir.Tests.Common/Categories.cs +++ b/src/Microsoft.Health.Fhir.Tests.Common/Categories.cs @@ -31,6 +31,8 @@ public static class Categories public const string AnonymizedExport = "AnonymizedExport"; + public const string Import = "Import"; + public const string ExportDataValidation = "ExportDataValidation"; public const string ExportLongRunning = "ExportLongRunning"; diff --git a/src/Microsoft.Health.Fhir.Tests.Common/Microsoft.Health.Fhir.Tests.Common.csproj b/src/Microsoft.Health.Fhir.Tests.Common/Microsoft.Health.Fhir.Tests.Common.csproj index 6120cbdf1b..4d6307f61c 100644 --- a/src/Microsoft.Health.Fhir.Tests.Common/Microsoft.Health.Fhir.Tests.Common.csproj +++ b/src/Microsoft.Health.Fhir.Tests.Common/Microsoft.Health.Fhir.Tests.Common.csproj @@ -40,6 +40,10 @@ + + + + @@ -250,6 +254,10 @@ + + + + @@ -442,6 +450,7 @@ + diff --git a/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-DupPatientTemplate.ndjson b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-DupPatientTemplate.ndjson new file mode 100644 index 0000000000..ade662846b --- /dev/null +++ b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-DupPatientTemplate.ndjson @@ -0,0 +1,2 @@ +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:30.681+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n \n

Patient Donald DUCK @ Acme Healthcare, Inc. MR = 654321

\n \n
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:0.1.2.3.4.5.6.7","value":"654321"}],"active":true,"name":[{"use":"official","family":"Donald","given":["Duck"]}],"gender":"male","photo":[{"contentType":"image/gif","data":"R0lGODlhEwARAPcAAAAAAAAA/+9aAO+1AP/WAP/eAP/eCP/eEP/eGP/nAP/nCP/nEP/nIf/nKf/nUv/nWv/vAP/vCP/vEP/vGP/vIf/vKf/vMf/vOf/vWv/vY//va//vjP/3c//3lP/3nP//tf//vf///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////yH5BAEAAAEALAAAAAATABEAAAi+AAMIDDCgYMGBCBMSvMCQ4QCFCQcwDBGCA4cLDyEGECDxAoAQHjxwyKhQAMeGIUOSJJjRpIAGDS5wCDly4AALFlYOgHlBwwOSNydM0AmzwYGjBi8IHWoTgQYORg8QIGDAwAKhESI8HIDgwQaRDI1WXXAhK9MBBzZ8/XDxQoUFZC9IiCBh6wEHGz6IbNuwQoSpWxEgyLCXL8O/gAnylNlW6AUEBRIL7Og3KwQIiCXb9HsZQoIEUzUjNEiaNMKAAAA7"}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"E"}]}],"organization":{"reference":"Organization/1","display":"Walt Disney Corporation"}}],"managingOrganization":{"reference":"Organization/1","display":"ACME Healthcare, Inc"},"link":[{"other":{"reference":"Patient/pat2"},"type":"seealso"}]} 
+{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:30.681+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n \n

Patient Donald DUCK @ Acme Healthcare, Inc. MR = 654321

\n \n
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:0.1.2.3.4.5.6.7","value":"654321"}],"active":true,"name":[{"use":"official","family":"Donald","given":["Duck"]}],"gender":"male","photo":[{"contentType":"image/gif","data":"R0lGODlhEwARAPcAAAAAAAAA/+9aAO+1AP/WAP/eAP/eCP/eEP/eGP/nAP/nCP/nEP/nIf/nKf/nUv/nWv/vAP/vCP/vEP/vGP/vIf/vKf/vMf/vOf/vWv/vY//va//vjP/3c//3lP/3nP//tf//vf///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////yH5BAEAAAEALAAAAAATABEAAAi+AAMIDDCgYMGBCBMSvMCQ4QCFCQcwDBGCA4cLDyEGECDxAoAQHjxwyKhQAMeGIUOSJJjRpIAGDS5wCDly4AALFlYOgHlBwwOSNydM0AmzwYGjBi8IHWoTgQYORg8QIGDAwAKhESI8HIDgwQaRDI1WXXAhK9MBBzZ8/XDxQoUFZC9IiCBh6wEHGz6IbNuwQoSpWxEgyLCXL8O/gAnylNlW6AUEBRIL7Og3KwQIiCXb9HsZQoIEUzUjNEiaNMKAAAA7"}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"E"}]}],"organization":{"reference":"Organization/1","display":"Walt Disney Corporation"}}],"managingOrganization":{"reference":"Organization/1","display":"ACME Healthcare, Inc"},"link":[{"other":{"reference":"Patient/pat2"},"type":"seealso"}]} diff 
--git a/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-InvalidPatient.ndjson b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-InvalidPatient.ndjson new file mode 100644 index 0000000000..0a2bb4ed2d --- /dev/null +++ b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-InvalidPatient.ndjson @@ -0,0 +1,5 @@ +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:30.681+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n \n

Patient Donald DUCK @ Acme Healthcare, Inc. MR = 654321

\n \n
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:0.1.2.3.4.5.6.7","value":"654321"}],"active":true,"name":[{"use":"official","family":"Donald","given":["Duck"]}],"gender":"male","photo":[{"contentType":"image/gif","data":"R0lGODlhEwARAPcAAAAAAAAA/+9aAO+1AP/WAP/eAP/eCP/eEP/eGP/nAP/nCP/nEP/nIf/nKf/nUv/nWv/vAP/vCP/vEP/vGP/vIf/vKf/vMf/vOf/vWv/vY//va//vjP/3c//3lP/3nP//tf//vf///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////yH5BAEAAAEALAAAAAATABEAAAi+AAMIDDCgYMGBCBMSvMCQ4QCFCQcwDBGCA4cLDyEGECDxAoAQHjxwyKhQAMeGIUOSJJjRpIAGDS5wCDly4AALFlYOgHlBwwOSNydM0AmzwYGjBi8IHWoTgQYORg8QIGDAwAKhESI8HIDgwQaRDI1WXXAhK9MBBzZ8/XDxQoUFZC9IiCBh6wEHGz6IbNuwQoSpWxEgyLCXL8O/gAnylNlW6AUEBRIL7Og3KwQIiCXb9HsZQoIEUzUjNEiaNMKAAAA7"}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"E"}]}],"organization":{"reference":"Organization/1","display":"Walt Disney Corporation"}}],"managingOrganization":{"reference":"Organization/1","display":"ACME Healthcare, Inc"},"link":[{"other":{"reference":"Patient/pat2"},"type":"seealso"}]} 
+{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:57.871+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n\t\t\t\n\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\n\t\t\t
NamePeter James \n Chalmers ("Jim")\n
Address534 Erewhon, Pleasantville, Vic, 3999
ContactsHome: unknown. Work: (03) 5555 6473
IdMRN: 12345 (Acme Healthcare)
\n\t\t
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:1.2.36.146.595.217.0.1","value":"12345","period":{"start":"2001-05-06"},"assigner":{"display":"Acme Healthcare"}}],"active":true,"name":[{"use":"official","family":"Chalmers","given":["Peter","James"]},{"use":"usual","given":["Jim"]},{"use":"maiden","family":"Windsor","given":["Peter","James"],"period":{"end":"2002"}}],"telecom":[{"use":"home"},{"system":"phone","value":"(03) 5555 6473","use":"work","rank":1},{"system":"phone","value":"(03) 3410 5613","use":"mobile","rank":2},{"system":"phone","value":"(03) 5555 8834","use":"old","period":{"end":"2014"}}],"gender":"male","birthDate":"1974-12-25","_birthDate":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/patient-birthTime","valueDateTime":"1974-12-25T14:35:45-05:00"}]},"deceasedBoolean":false,"address":[{"use":"home","type":"both","text":"534 Erewhon St PeasantVille, Rainbow, Vic 3999","line":["534 Erewhon St"],"city":"PleasantVille","district":"Rainbow","state":"Vic","postalCode":"3999","period":{"start":"1974-12-25"}}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"N"}]}],"name":{"family":"du Marché","_family":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/humanname-own-prefix","valueString":"VV"}]},"given":["Bénédicte"]},"telecom":[{"system":"phone","value":"+33 (237) 998327"}],"address":{"use":"home","type":"both","line":["534 Erewhon St"],"city":"PleasantVille","district":"Rainbow","state":"Vic","postalCode":"3999","period":{"start":"1974-12-25"}},"gender":"female","period":{"start":"2012"}}],"managingOrganization":{"reference":"Organization/1"}} +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:21:08.491+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health 
data"}]},"text":{"status":"generated","div":"
Everywoman, Eve. SSN:\n 444222222
"},"identifier":[{"type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"SS"}]},"system":"http://hl7.org/fhir/sid/us-ssn","value":"444222222"}],"active":true,"name":[{"use":"official","family":"Everywoman","given":["Eve"]}],"telecom":[{"system":"phone","value":"555-555-2003","use":"work"}],"gender":"female","birthDate":"1973-05-31","address":[{"use":"home","line":["2222 Home Street"]}],"managingOrganization":{"reference":"Organization/hl7"}} +invalid-row +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:21:08.491+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
Everywoman, Eve. SSN:\n 444222222
"},"identifier":[{"type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"SS"}]},"system":"http://hl7.org/fhir/sid/us-ssn","value":"444222222"}],"active":true,"telecom":[{"system":"phone","value":"555-555-2003","use":"work"}],"gender":"female","birthDate":"1973-05-31","address":[{"use":"home","line":["2222 Home Street"]}],"managingOrganization":{"reference":"Organization/hl7"}} diff --git a/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-Patient.ndjson b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-Patient.ndjson new file mode 100644 index 0000000000..bd6c190580 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-Patient.ndjson @@ -0,0 +1,4 @@ +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:30.681+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n \n

Patient Donald DUCK @ Acme Healthcare, Inc. MR = 654321

\n \n
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:0.1.2.3.4.5.6.7","value":"654321"}],"active":true,"name":[{"use":"official","family":"Donald","given":["Duck"]}],"gender":"male","photo":[{"contentType":"image/gif","data":"R0lGODlhEwARAPcAAAAAAAAA/+9aAO+1AP/WAP/eAP/eCP/eEP/eGP/nAP/nCP/nEP/nIf/nKf/nUv/nWv/vAP/vCP/vEP/vGP/vIf/vKf/vMf/vOf/vWv/vY//va//vjP/3c//3lP/3nP//tf//vf///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////yH5BAEAAAEALAAAAAATABEAAAi+AAMIDDCgYMGBCBMSvMCQ4QCFCQcwDBGCA4cLDyEGECDxAoAQHjxwyKhQAMeGIUOSJJjRpIAGDS5wCDly4AALFlYOgHlBwwOSNydM0AmzwYGjBi8IHWoTgQYORg8QIGDAwAKhESI8HIDgwQaRDI1WXXAhK9MBBzZ8/XDxQoUFZC9IiCBh6wEHGz6IbNuwQoSpWxEgyLCXL8O/gAnylNlW6AUEBRIL7Og3KwQIiCXb9HsZQoIEUzUjNEiaNMKAAAA7"}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"E"}]}],"organization":{"reference":"Organization/1","display":"Walt Disney Corporation"}}],"managingOrganization":{"reference":"Organization/1","display":"ACME Healthcare, Inc"},"link":[{"other":{"reference":"Patient/pat2"},"type":"seealso"}]} 
+{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:57.871+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n\t\t\t\n\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\n\t\t\t
NamePeter James \n Chalmers ("Jim")\n
Address534 Erewhon, Pleasantville, Vic, 3999
ContactsHome: unknown. Work: (03) 5555 6473
IdMRN: 12345 (Acme Healthcare)
\n\t\t
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:1.2.36.146.595.217.0.1","value":"12345","period":{"start":"2001-05-06"},"assigner":{"display":"Acme Healthcare"}}],"active":true,"name":[{"use":"official","family":"Chalmers","given":["Peter","James"]},{"use":"usual","given":["Jim"]},{"use":"maiden","family":"Windsor","given":["Peter","James"],"period":{"end":"2002"}}],"telecom":[{"use":"home"},{"system":"phone","value":"(03) 5555 6473","use":"work","rank":1},{"system":"phone","value":"(03) 3410 5613","use":"mobile","rank":2},{"system":"phone","value":"(03) 5555 8834","use":"old","period":{"end":"2014"}}],"gender":"male","birthDate":"1974-12-25","_birthDate":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/patient-birthTime","valueDateTime":"1974-12-25T14:35:45-05:00"}]},"deceasedBoolean":false,"address":[{"use":"home","type":"both","text":"534 Erewhon St PeasantVille, Rainbow, Vic 3999","line":["534 Erewhon St"],"city":"PleasantVille","district":"Rainbow","state":"Vic","postalCode":"3999","period":{"start":"1974-12-25"}}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"N"}]}],"name":{"family":"du Marché","_family":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/humanname-own-prefix","valueString":"VV"}]},"given":["Bénédicte"]},"telecom":[{"system":"phone","value":"+33 (237) 998327"}],"address":{"use":"home","type":"both","line":["534 Erewhon St"],"city":"PleasantVille","district":"Rainbow","state":"Vic","postalCode":"3999","period":{"start":"1974-12-25"}},"gender":"female","period":{"start":"2012"}}],"managingOrganization":{"reference":"Organization/1"}} +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:21:08.491+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health 
data"}]},"text":{"status":"generated","div":"
Everywoman, Eve. SSN:\n 444222222
"},"identifier":[{"type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"SS"}]},"system":"http://hl7.org/fhir/sid/us-ssn","value":"444222222"}],"active":true,"name":[{"use":"official","family":"Everywoman","given":["Eve"]}],"telecom":[{"system":"phone","value":"555-555-2003","use":"work"}],"gender":"female","birthDate":"1973-05-31","address":[{"use":"home","line":["2222 Home Street"]}],"managingOrganization":{"reference":"Organization/hl7"}} +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:21:08.491+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
Everywoman, Eve. SSN:\n 444222222
"},"identifier":[{"type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"SS"}]},"system":"http://hl7.org/fhir/sid/us-ssn","value":"444222222"}],"active":true,"telecom":[{"system":"phone","value":"555-555-2003","use":"work"}],"gender":"female","birthDate":"1973-05-31","address":[{"use":"home","line":["2222 Home Street"]}],"managingOrganization":{"reference":"Organization/hl7"}} diff --git a/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-SinglePatientTemplate.ndjson b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-SinglePatientTemplate.ndjson new file mode 100644 index 0000000000..c233e11fa6 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Tests.Common/TestFiles/Normative/Import-SinglePatientTemplate.ndjson @@ -0,0 +1 @@ +{"resourceType":"Patient","id":"##PatientID##","meta":{"versionId":"1","lastUpdated":"2020-11-16T05:20:30.681+00:00","tag":[{"system":"http://terminology.hl7.org/CodeSystem/v3-ActReason","code":"HTEST","display":"test health data"}]},"text":{"status":"generated","div":"
\n \n

Patient Donald DUCK @ Acme Healthcare, Inc. MR = 654321

\n \n
"},"identifier":[{"use":"usual","type":{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0203","code":"MR"}]},"system":"urn:oid:0.1.2.3.4.5.6.7","value":"654321"}],"active":true,"name":[{"use":"official","family":"Donald","given":["Duck"]}],"gender":"male","photo":[{"contentType":"image/gif","data":"R0lGODlhEwARAPcAAAAAAAAA/+9aAO+1AP/WAP/eAP/eCP/eEP/eGP/nAP/nCP/nEP/nIf/nKf/nUv/nWv/vAP/vCP/vEP/vGP/vIf/vKf/vMf/vOf/vWv/vY//va//vjP/3c//3lP/3nP//tf//vf///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////yH5BAEAAAEALAAAAAATABEAAAi+AAMIDDCgYMGBCBMSvMCQ4QCFCQcwDBGCA4cLDyEGECDxAoAQHjxwyKhQAMeGIUOSJJjRpIAGDS5wCDly4AALFlYOgHlBwwOSNydM0AmzwYGjBi8IHWoTgQYORg8QIGDAwAKhESI8HIDgwQaRDI1WXXAhK9MBBzZ8/XDxQoUFZC9IiCBh6wEHGz6IbNuwQoSpWxEgyLCXL8O/gAnylNlW6AUEBRIL7Og3KwQIiCXb9HsZQoIEUzUjNEiaNMKAAAA7"}],"contact":[{"relationship":[{"coding":[{"system":"http://terminology.hl7.org/CodeSystem/v2-0131","code":"E"}]}],"organization":{"reference":"Organization/1","display":"Walt Disney Corporation"}}],"managingOrganization":{"reference":"Organization/1","display":"ACME Healthcare, Inc"},"link":[{"other":{"reference":"Patient/pat2"},"type":"seealso"}]} diff 
--git a/src/Microsoft.Health.Fhir.ValueSets/AuditEventSubType.cs b/src/Microsoft.Health.Fhir.ValueSets/AuditEventSubType.cs index 4d8d722d2d..817326e3f0 100644 --- a/src/Microsoft.Health.Fhir.ValueSets/AuditEventSubType.cs +++ b/src/Microsoft.Health.Fhir.ValueSets/AuditEventSubType.cs @@ -63,6 +63,8 @@ public static class AuditEventSubType public const string Validate = "validate"; + public const string Import = "import"; + public const string MemberMatch = "member-match"; public const string Everything = "everything"; diff --git a/src/Microsoft.Health.TaskManagement.UnitTests/TaskHostingTests.cs b/src/Microsoft.Health.TaskManagement.UnitTests/TaskHostingTests.cs index b6dc91f478..ce453ce2fb 100644 --- a/src/Microsoft.Health.TaskManagement.UnitTests/TaskHostingTests.cs +++ b/src/Microsoft.Health.TaskManagement.UnitTests/TaskHostingTests.cs @@ -211,6 +211,7 @@ public async Task GivenTaskCrash_WhenTaskHostingRepickupTask_ThenFirstTasksShoul TestTaskConsumer consumer = new TestTaskConsumer(taskInfos.ToArray()); bool isCancelled = false; + CancellationTokenSource tokenSource = new CancellationTokenSource(); TestTaskFactory factory = new TestTaskFactory(t => { return new TestTask( @@ -223,6 +224,7 @@ public async Task GivenTaskCrash_WhenTaskHostingRepickupTask_ThenFirstTasksShoul { await Task.Delay(TimeSpan.FromMilliseconds(20)); isCancelled = true; + tokenSource.Cancel(); }) { RunId = Guid.NewGuid().ToString(), @@ -231,8 +233,7 @@ public async Task GivenTaskCrash_WhenTaskHostingRepickupTask_ThenFirstTasksShoul TaskHosting taskHosting = new TaskHosting(consumer, factory, _logger); taskHosting.PollingFrequencyInSeconds = 0; - - CancellationTokenSource tokenSource = new CancellationTokenSource(); + taskHosting.TaskHeartbeatTimeoutThresholdInSeconds = 0; tokenSource.CancelAfter(TimeSpan.FromSeconds(5)); await taskHosting.StartAsync(tokenSource); @@ -377,6 +378,49 @@ public async Task GivenTaskWithRetriableException_WhenTaskHostingStart_ThenTaskS 
Assert.Equal(TaskResult.Success, taskResult1.Result); } + [Fact] + public async Task GivenTaskRunning_WhenCancel_ThenTaskShouldBeCompleteWithCancelledStatus() + { + TaskInfo taskInfo0 = new TaskInfo(); + taskInfo0.TaskId = Guid.NewGuid().ToString(); + taskInfo0.TaskTypeId = 0; + + TestTaskConsumer consumer = new TestTaskConsumer(new TaskInfo[] { taskInfo0 }); + CancellationTokenSource tokenSource = new CancellationTokenSource(); + AutoResetEvent autoResetEvent1 = new AutoResetEvent(false); + AutoResetEvent autoResetEvent2 = new AutoResetEvent(false); + TestTaskFactory factory = new TestTaskFactory(t => + { + return new TestTask( + () => + { + autoResetEvent2.Set(); + autoResetEvent1.WaitOne(); + + tokenSource.Cancel(); + + return Task.FromResult(new TaskResultData(TaskResult.Canceled, string.Empty)); + }, + () => + { + autoResetEvent1.Set(); + }); + }); + + TaskHosting taskHosting = new TaskHosting(consumer, factory, _logger); + taskHosting.PollingFrequencyInSeconds = 0; + taskHosting.MaxRunningTaskCount = 1; + + Task hostingTask = taskHosting.StartAsync(tokenSource); + autoResetEvent2.WaitOne(); + taskInfo0.IsCanceled = true; + + await hostingTask; + + TaskResultData taskResult = JsonConvert.DeserializeObject(taskInfo0.Result); + Assert.Equal(TaskResult.Canceled, taskResult.Result); + } + [Fact(Skip = "Fault injection test require local environment.")] public async Task GivenTaskThrowException_WhenTaskHostingStart_ThenTaskHostingShouldKeepRunning() { diff --git a/src/Microsoft.Health.TaskManagement.UnitTests/TestTask.cs b/src/Microsoft.Health.TaskManagement.UnitTests/TestTask.cs index d44e69b025..f9859ab583 100644 --- a/src/Microsoft.Health.TaskManagement.UnitTests/TestTask.cs +++ b/src/Microsoft.Health.TaskManagement.UnitTests/TestTask.cs @@ -12,7 +12,6 @@ public class TestTask : ITask { private Func> _executeFunc; private Action _cancelAction; - private bool _isCancelling = false; public TestTask(Func> executeFunc, Action cancelAction) { @@ -30,13 +29,6 @@ 
public Task ExecuteAsync() public void Cancel() { _cancelAction?.Invoke(); - - _isCancelling = true; - } - - public bool IsCancelling() - { - return _isCancelling; } public void Dispose() diff --git a/src/Microsoft.Health.TaskManagement/IContextUpdater.cs b/src/Microsoft.Health.TaskManagement/IContextUpdater.cs index 0e972eae71..d48c14be69 100644 --- a/src/Microsoft.Health.TaskManagement/IContextUpdater.cs +++ b/src/Microsoft.Health.TaskManagement/IContextUpdater.cs @@ -3,6 +3,7 @@ // Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. // ------------------------------------------------------------------------------------------------- +using System.Threading; using System.Threading.Tasks; namespace Microsoft.Health.TaskManagement @@ -15,9 +16,8 @@ public interface IContextUpdater /// /// Update context for the task. /// - /// Id for the task /// Task context in string format - /// Task infomation after context updated. - public Task UpdateContextAsync(string taskId, string context); + /// Cancellation Token + public Task UpdateContextAsync(string context, CancellationToken cancellationToken); } } diff --git a/src/Microsoft.Health.TaskManagement/IContextUpdaterFactory.cs b/src/Microsoft.Health.TaskManagement/IContextUpdaterFactory.cs new file mode 100644 index 0000000000..9409c13c0a --- /dev/null +++ b/src/Microsoft.Health.TaskManagement/IContextUpdaterFactory.cs @@ -0,0 +1,20 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +namespace Microsoft.Health.TaskManagement +{ + /// + /// Factory for task context updater + /// + public interface IContextUpdaterFactory + { + /// + /// Create task context updater from taskId and runId + /// + /// Task id + /// Current task run id + public IContextUpdater CreateContextUpdater(string taskId, string runId); + } +} diff --git a/src/Microsoft.Health.TaskManagement/ITask.cs b/src/Microsoft.Health.TaskManagement/ITask.cs index bf176c3be2..0ab3bc282d 100644 --- a/src/Microsoft.Health.TaskManagement/ITask.cs +++ b/src/Microsoft.Health.TaskManagement/ITask.cs @@ -28,11 +28,5 @@ public interface ITask : IDisposable /// Cancel the task execution. /// public void Cancel(); - - /// - /// Check if the task is in cancelling status. - /// - /// return true if cancel operation triggered. - public bool IsCancelling(); } } diff --git a/src/Microsoft.Health.TaskManagement/ITaskManager.cs b/src/Microsoft.Health.TaskManagement/ITaskManager.cs index b330f290ce..6905dbbd16 100644 --- a/src/Microsoft.Health.TaskManagement/ITaskManager.cs +++ b/src/Microsoft.Health.TaskManagement/ITaskManager.cs @@ -17,9 +17,10 @@ public interface ITaskManager /// Create task for task information /// /// Task information. + /// Only create task only if there's no active task with same type. /// Cancellation Token /// Task information after created. - public Task CreateTaskAsync(TaskInfo task, CancellationToken cancellationToken); + public Task CreateTaskAsync(TaskInfo task, bool isUniqueTaskByType, CancellationToken cancellationToken); /// /// Get task information by id. 
diff --git a/src/Microsoft.Health.TaskManagement/TaskHosting.cs b/src/Microsoft.Health.TaskManagement/TaskHosting.cs index a4abeff4fe..ac81b6e74d 100644 --- a/src/Microsoft.Health.TaskManagement/TaskHosting.cs +++ b/src/Microsoft.Health.TaskManagement/TaskHosting.cs @@ -113,7 +113,13 @@ private async Task ExecuteTaskAsync(TaskInfo taskInfo, CancellationToken cancell { try { - Task runningTask = task.ExecuteAsync(); + if (taskInfo.IsCanceled) + { + // For cancelled task, try to execute it for potential cleanup. + task.Cancel(); + } + + Task runningTask = Task.Run(() => task.ExecuteAsync()); _activeTaskRecordsForKeepAlive[taskInfo.TaskId] = task; result = await runningTask; @@ -162,18 +168,13 @@ private async Task KeepAliveTasksAsync(CancellationToken cancellationToken) while (!cancellationToken.IsCancellationRequested) { - Task intervalDelayTask = Task.Delay(TaskHeartbeatIntervalInSeconds, CancellationToken.None); + Task intervalDelayTask = Task.Delay(TimeSpan.FromSeconds(TaskHeartbeatIntervalInSeconds), CancellationToken.None); KeyValuePair[] activeTaskRecords = _activeTaskRecordsForKeepAlive.ToArray(); foreach ((string taskId, ITask task) in activeTaskRecords) { try { - if (task.IsCancelling()) - { - continue; - } - bool shouldCancel = false; try { diff --git a/src/Microsoft.Health.TaskManagement/TaskResultData.cs b/src/Microsoft.Health.TaskManagement/TaskResultData.cs index d966ae8dad..09b036df40 100644 --- a/src/Microsoft.Health.TaskManagement/TaskResultData.cs +++ b/src/Microsoft.Health.TaskManagement/TaskResultData.cs @@ -9,6 +9,10 @@ namespace Microsoft.Health.TaskManagement { public class TaskResultData { + public TaskResultData() + { + } + public TaskResultData(TaskResult result, string resultData) { EnsureArg.IsNotNull(resultData, nameof(resultData)); diff --git a/test/Microsoft.Health.Fhir.R4.Tests.E2E/Microsoft.Health.Fhir.R4.Tests.E2E.csproj b/test/Microsoft.Health.Fhir.R4.Tests.E2E/Microsoft.Health.Fhir.R4.Tests.E2E.csproj index a7220b6863..771916e660 
100644 --- a/test/Microsoft.Health.Fhir.R4.Tests.E2E/Microsoft.Health.Fhir.R4.Tests.E2E.csproj +++ b/test/Microsoft.Health.Fhir.R4.Tests.E2E/Microsoft.Health.Fhir.R4.Tests.E2E.csproj @@ -38,6 +38,7 @@ + diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestUsers.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestUsers.cs index d4bc326621..4be5f362be 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestUsers.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestUsers.cs @@ -5,6 +5,12 @@ namespace Microsoft.Health.Fhir.Tests.E2E.Common { + /* + * When adding a new user they must be added in the following locations: + * - /build/jobs/run-tests.yml DotNetCoreCLI@2 Tasks + * - /testauthenvironment.json + */ + public static class TestUsers { public static TestUser ReadOnlyUser { get; } = new TestUser("globalReaderUser"); @@ -15,6 +21,8 @@ public static class TestUsers public static TestUser ConvertDataUser { get; } = new TestUser("globalConverterUser"); + public static TestUser BulkImportUser { get; } = new TestUser("globalImporterUser"); + public static TestUser AdminUser { get; } = new TestUser("globalAdminUser"); } } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.projitems b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.projitems index 972997686f..89e3ddba2e 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.projitems +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.projitems @@ -31,6 +31,28 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.shproj b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.shproj index 5d107dae05..da6f2e8f59 100644 --- 
a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.shproj +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Microsoft.Health.Fhir.Shared.Tests.E2E.shproj @@ -14,5 +14,6 @@ + \ No newline at end of file diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/BatchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/BatchTests.cs index eb22a188c9..7a78f0f2c5 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/BatchTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/BatchTests.cs @@ -9,7 +9,6 @@ using Hl7.Fhir.Model; using Microsoft.Health.Fhir.Client; using Microsoft.Health.Fhir.Core.Extensions; -using Microsoft.Health.Fhir.Smart.Tests.E2E; using Microsoft.Health.Fhir.Tests.Common; using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; using Microsoft.Health.Fhir.Tests.E2E.Common; diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExportDataValidationTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExportDataValidationTests.cs index 3ef2e84fc9..dcd1addf3f 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExportDataValidationTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExportDataValidationTests.cs @@ -10,7 +10,6 @@ using Hl7.Fhir.Serialization; using Microsoft.Health.Fhir.Core.Features.Operations.Export; using Microsoft.Health.Fhir.Core.Models; -using Microsoft.Health.Fhir.Shared.Tests.E2E.Rest; using Microsoft.Health.Fhir.Tests.Common; using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; using Microsoft.Health.Fhir.Tests.E2E.Common; diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTestFixture.cs new file mode 100644 index 0000000000..d12cb9f6c2 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTestFixture.cs @@ -0,0 +1,60 @@ +// 
------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportBasicSearchTestFixture : ImportTestFixture + { + public ImportBasicSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + PatientAddressCityAndFamily.Address = new List
() + { + new Address() { City = Guid.NewGuid().ToString("N") }, + }; + PatientAddressCityAndFamily.Name = new List() + { + new HumanName() { Family = Guid.NewGuid().ToString("N") }, + }; + + string cityName = Guid.NewGuid().ToString("N"); + PatientWithSameCity1.Address = new List
() + { + new Address() { City = cityName }, + }; + PatientWithSameCity2.Address = new List
() + { + new Address() { City = cityName }, + }; + } + + public Patient PatientAddressCityAndFamily { get; set; } = new Patient() { Id = Guid.NewGuid().ToString("N") }; + + public Patient PatientWithSameCity1 { get; set; } = new Patient() { Id = Guid.NewGuid().ToString("N") }; + + public Patient PatientWithSameCity2 { get; set; } = new Patient() { Id = Guid.NewGuid().ToString("N") }; + + public Patient PatientWithGender { get; set; } = new Patient() { Id = Guid.NewGuid().ToString("N"), Gender = AdministrativeGender.Male }; + + public string FixtureTag { get; } = Guid.NewGuid().ToString(); + + protected override async Task OnInitializedAsync() + { + await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + PatientAddressCityAndFamily.AddTestTag(FixtureTag), + PatientWithSameCity1.AddTestTag(FixtureTag), + PatientWithSameCity2.AddTestTag(FixtureTag), + PatientWithGender.AddTestTag(FixtureTag)); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTests.cs new file mode 100644 index 0000000000..e0c391ca06 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportBasicSearchTests.cs @@ -0,0 +1,56 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportBasicSearchTests : IClassFixture + { + private readonly TestFhirClient _client; + private readonly ImportBasicSearchTestFixture _fixture; + + public ImportBasicSearchTests(ImportBasicSearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Fact] + public async Task GivenImportedResourceWithVariousValues_WhenSearchedWithMultipleParams_ThenOnlyResourcesMatchingAllSearchParamsShouldBeReturned() + { + Patient patientAddressCityAndFamily = _fixture.PatientAddressCityAndFamily; + string query = string.Format("Patient?address-city={0}&family={1}&_tag={2}", patientAddressCityAndFamily.Address[0].City, patientAddressCityAndFamily.Name[0].Family, _fixture.FixtureTag); + + await ImportTestHelper.VerifySearchResultAsync(_fixture.TestFhirClient, query, patientAddressCityAndFamily); + } + + [Fact] + public async Task GivenImportedResourceWithVariousValues_WhenSearchedWithCityParam_ThenOnlyResourcesMatchingAllSearchParamsShouldBeReturned() + { + string query = string.Format("Patient?address-city={0}&_tag={1}", _fixture.PatientWithSameCity1.Address[0].City, _fixture.FixtureTag); + + await ImportTestHelper.VerifySearchResultAsync(_fixture.TestFhirClient, query, _fixture.PatientWithSameCity1, _fixture.PatientWithSameCity2); + } + + [Fact] + public async Task 
GivenImportedResourceWithVariousValues_WhenSearchedWithTheMissingModifer_ThenOnlyTheResourcesWithMissingOrPresentParametersAreReturned() + { + string queryMissingFalse = string.Format("Patient?gender:missing=false&_tag={0}", _fixture.FixtureTag); + await ImportTestHelper.VerifySearchResultAsync(_fixture.TestFhirClient, queryMissingFalse, _fixture.PatientWithGender); + + string queryMissing = string.Format("Patient?gender:missing=true&_tag={0}", _fixture.FixtureTag); + await ImportTestHelper.VerifySearchResultAsync(_fixture.TestFhirClient, queryMissing, _fixture.PatientAddressCityAndFamily, _fixture.PatientWithSameCity1, _fixture.PatientWithSameCity2); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTestFixture.cs new file mode 100644 index 0000000000..6ce33929e1 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTestFixture.cs @@ -0,0 +1,99 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportCompositeSearchTestFixture : ImportTestFixture + { + private static readonly string[] ObservationTestFileNames = + { + "ObservationWith1MinuteApgarScore", + "ObservationWith20MinuteApgarScore", + "ObservationWithEyeColor", + "ObservationWithLongEyeColor", + "ObservationWithTemperature", + "ObservationWithTPMTDiplotype", + "ObservationWithTPMTHaplotypeOne", + "ObservationWithBloodPressure", + }; + + private static readonly string[] DocumentReferenceTestFiles = + { + "DocumentReference-example-relatesTo-code-appends", + "DocumentReference-example-relatesTo-code-transforms-replaces-target", + "DocumentReference-example-relatesTo-code-transforms", + }; + + public ImportCompositeSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + } + + public string FixtureTag { get; } = Guid.NewGuid().ToString(); + + public IReadOnlyDictionary Observations { get; private set; } + + public IReadOnlyDictionary DocumentReferences { get; private set; } + + protected override async Task OnInitializedAsync() + { + Observations = CreateResultDictionary(ObservationTestFileNames); + DocumentReferences = CreateResultDictionary(DocumentReferenceTestFiles); + + List resources = new List(); + resources.AddRange(Observations.Values); + resources.AddRange(DocumentReferences.Values); + + await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + resources.ToArray()); + } + + private Dictionary CreateResultDictionary(string[] files) + where T : Resource + { + var 
resultDictionary = new Dictionary(files.Length); + + for (int i = 0; i < files.Length; i++) + { + string testFileName = files[i]; + + T result = GetResourceFromFile(testFileName); + + resultDictionary.Add(testFileName, result); + } + + return resultDictionary; + } + + private T GetResourceFromFile(string testFileName) + where T : Resource + { + T resource = Samples.GetJsonSample(testFileName); + + switch (resource) + { + case Observation o: + o.AddTestTag(FixtureTag); + o.Id = Guid.NewGuid().ToString("N"); + break; + case DocumentReference d: + d.AddTestTag(FixtureTag); + d.Id = Guid.NewGuid().ToString("N"); + break; + } + + return resource; + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTests.cs new file mode 100644 index 0000000000..b6538bbc4b --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportCompositeSearchTests.cs @@ -0,0 +1,145 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using System.Threading.Tasks; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Fhir.Tests.E2E.Rest.Search; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportCompositeSearchTests : IClassFixture + { + private const string ObservationWith1MinuteApgarScore = "ObservationWith1MinuteApgarScore"; + private const string ObservationWith20MinuteApgarScore = "ObservationWith20MinuteApgarScore"; + private const string ObservationWithTemperature = "ObservationWithTemperature"; + private const string ObservationWithTPMTDiplotype = "ObservationWithTPMTDiplotype"; + private const string ObservationWithTPMTHaplotypeOne = "ObservationWithTPMTHaplotypeOne"; + private const string ObservationWithBloodPressure = "ObservationWithBloodPressure"; + private const string ObservationWithEyeColor = "ObservationWithEyeColor"; + private const string ObservationWithLongEyeColor = "ObservationWithLongEyeColor"; + private const string DocumentReferenceExample = "DocumentReference-example-relatesTo-code-appends"; + private const string DocumentReferenceExample002 = "DocumentReference-example-relatesTo-code-transforms-replaces-target"; + + private readonly TestFhirClient _client; + private readonly ImportCompositeSearchTestFixture _fixture; + + public ImportCompositeSearchTests(ImportCompositeSearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Theory] + 
[InlineData("code-value-quantity=http://snomed.info/sct|443849008$10|http://unitsofmeasure.org|{score}", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=443849008$10|http://unitsofmeasure.org|{score}", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$10", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$10||{score}", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$eq10|http://unitsofmeasure.org|{score}", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$ne10|http://unitsofmeasure.org|{score}")] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$lt10|http://unitsofmeasure.org|{score}")] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$le10|http://unitsofmeasure.org|{score}", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$gt10|http://unitsofmeasure.org|{score}")] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$ge10|http://unitsofmeasure.org|{score}", ObservationWith20MinuteApgarScore)] + [InlineData("code-value-quantity=http://loinc.org|8310-5$39|http://unitsofmeasure.org|Cel", ObservationWithTemperature)] + [InlineData("code-value-quantity=http://loinc.org|8331-1$39|http://unitsofmeasure.org|Cel", ObservationWithTemperature)] + [InlineData("code-value-quantity=http://snomed.info/sct|56342008$39|http://unitsofmeasure.org|Cel", ObservationWithTemperature)] + [InlineData("combo-code-value-quantity=http://loinc.org|9272-6$0|http://unitsofmeasure.org|{score}", ObservationWith1MinuteApgarScore)] // Match: Observation.code against Observation.valueQuantity + [InlineData("combo-code-value-quantity=http://snomed.info/sct|169895004$0|http://unitsofmeasure.org|{score}", ObservationWith1MinuteApgarScore)] // Match: 
Observation.code against Observation.valueQuantity + [InlineData("combo-code-value-quantity=85354-9$107")] // Not match: Observation.code against Observation.component[0].valueQuantity + [InlineData("combo-code-value-quantity=8480-6$107", ObservationWithBloodPressure)] // Match: Observation.component[0].code against Observation.component[0].valueQuantity + [InlineData("combo-code-value-quantity=8480-6$60")] // Not match: Observation.component[0].code against Observation.component[1].valueQuantity + [InlineData("code-value-quantity=unknownSystem|443849008$10")] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$eq10|unknownSystem|{score}")] + [InlineData("code-value-quantity=http://snomed.info/sct|443849008$eq10|http://unitsofmeasure.org|unknownQuantityId")] + [InlineData("code-value-quantity=http://loinc.org|8310-5$39|http://unitsofmeasure.org|Cel,http://snomed.info/sct|443849008$10|http://unitsofmeasure.org|{score}", ObservationWith20MinuteApgarScore, ObservationWithTemperature)] + [InlineData("code-value-quantity=http://loinc.org|8310-5$gt36.6|http://unitsofmeasure.org|Cel,http://loinc.org|9272-6$0|http://unitsofmeasure.org|{score}", ObservationWith1MinuteApgarScore, ObservationWithTemperature)] + public async Task GivenACompositeSearchParameterWithTokenAndQuantity_WhenSearchedForImportedResources_ThenCorrectBundleShouldBeReturned(string queryValue, params string[] expectedObservationNames) + { + await SearchAndValidateObservations(queryValue, expectedObservationNames); + } + + [Theory] + [InlineData("code-value-string=http://snomed.info/sct|162806009$blue", ObservationWithEyeColor)] + [InlineData("code-value-string=162806009$blue", ObservationWithEyeColor)] + [InlineData("code-value-string=162806009$Lorem", ObservationWithLongEyeColor)] + [InlineData("code-value-string=162806009$" + StringSearchTestFixture.LongString, ObservationWithLongEyeColor)] + [InlineData("code-value-string=162806009$" + StringSearchTestFixture.LongString + "Not")] + 
[InlineData("code-value-string=http://snomed.info/sct|$blue", ObservationWithEyeColor)] + [InlineData("code-value-string=http://snomed.info/sct|162806009$red")] + [InlineData("code-value-string=162806009$Lorem,162806009$blue", ObservationWithLongEyeColor, ObservationWithEyeColor)] + public async Task GivenACompositeSearchParameterWithTokenAndString_WhenSearchedForImportedResources_ThenCorrectBundleShouldBeReturned(string queryValue, params string[] expectedObservationNames) + { + await SearchAndValidateObservations(queryValue, expectedObservationNames); + } + + [Theory] + [InlineData("relationship=DocumentReference/example-appends$http://hl7.org/fhir/document-relationship-type|appends", DocumentReferenceExample)] + [InlineData("relationship=DocumentReference/example-appends$appends", DocumentReferenceExample)] + [InlineData("relationship=DocumentReference/example-appends$replaces")] + [InlineData("relationship=DocumentReference/example-replaces$replaces", DocumentReferenceExample002)] + [InlineData("relationship=DocumentReference/example-appends$appends,DocumentReference/example-replaces$replaces", DocumentReferenceExample, DocumentReferenceExample002, Skip = "https://github.com/microsoft/fhir-server/issues/523")] + [InlineData("relationship=DocumentReference/example-appends,DocumentReference/example-replaces$replaces", DocumentReferenceExample, DocumentReferenceExample002, Skip = "https://github.com/microsoft/fhir-server/issues/523")] + public async Task GivenACompositeSearchParameterWithTokenAndReference_WhenSearchedForImportedResources_ThenCorrectBundleShouldBeReturned(string queryValue, params string[] expectedDocumentReferenceNames) + { + await SearchAndValidateDocumentReferences(queryValue, expectedDocumentReferenceNames); + } + + [Theory] + [InlineData("combo-code-value-concept=http://snomed.info/sct|443849008$http://loinc.org/la|LA6724-4")] // Not match: Observation.code against Observation.component[0].valueCodeableConcept.coding[0] + 
[InlineData("combo-code-value-concept=443849008$http://loinc.org/la|LA6724-4")] // Not match: Observation.code (without system) against Observation.component[0].valueCodeableConcept.coding[0] + [InlineData("combo-code-value-concept=http://snomed.info/sct|443849008$LA6724-4")] // Not match: Observation.code against Observation.component[0].valueCodeableConcept.coding[0] (without system) + [InlineData("combo-code-value-concept=443849008$LA6724-4")] // Not match: Observation.code (without system) against Observation.component[0].valueCodeableConcept.coding[0] (without system) + [InlineData("combo-code-value-concept=|443849008$http://loinc.org/la|LA6724-4")] // Not match: Observation.code (with explicit no system) against Observation.component[0].valueCodeableConcept + [InlineData("combo-code-value-concept=http://snomed.info/sct|443849008$|LA6724-4")] // Not match: Observation.code against Observation.component[0].valueCodeableConcept.coding[0] (with explicit no system) + [InlineData("combo-code-value-concept=http://snomed.info/sct|443849008$http://snomed.info/sct|443849008")] // Not match: Observation.code against Observation.code + [InlineData("combo-code-value-concept=http://snomed.info/sct|443849008$http://snomed.info/sct|249227004")] // Not match: Observation.code against Observation.component[0].code + [InlineData("combo-code-value-concept=http://snomed.info/sct|443849008$http:/acme.ped/apgarcolor|2")] // Not match: Observation.code against Observation.component[0].valueCodeableConcept.coding[1] + [InlineData("combo-code-value-concept=http://snomed.info/sct|249227004$http://loinc.org/la|LA6724-4", ObservationWith20MinuteApgarScore)] // Match: Observation.component[0].code against Observation.component[0].valueCodeableConcept.coding[0] + [InlineData("combo-code-value-concept=http://snomed.info/sct|249227004$http:/acme.ped/apgarcolor|2", ObservationWith20MinuteApgarScore)] // Match: Observation.component[1].code against 
Observation.component[0].valueCodeableConcept.coding[1] + [InlineData("combo-code-value-concept=http://loinc.org/la|LA6724-4$http:/acme.ped/apgarcolor|2")] // Not match: Observation.component[0].valueCodeableConcept.coding[0] against Observation.component[0].valueCodeableConcept.coding[1] + [InlineData("combo-code-value-concept=169895004$http://loinc.org/la|LA6725-1")] // Not match: Observation.code[1] against Observation.component[4].valueCodeableConcept.coding[0] + [InlineData("combo-code-value-concept=http://snomed.info/sct|249227004$http://loinc.org/la|LA6722-8", ObservationWith1MinuteApgarScore)] // Match: Observation.component[0].code against Observation.component[0].valueCodeableConcept.coding[0] + [InlineData("combo-code-value-concept=http://snomed.info/sct|249227004$http://loinc.org/la|LA6722-8,http://snomed.info/sct|249227004$http://loinc.org/la|LA6724-4", ObservationWith20MinuteApgarScore, ObservationWith1MinuteApgarScore)] + public async Task GivenACompositeSearchParameterWithTokenAndToken_WhenSearchedForImportedResources_ThenCorrectBundleShouldBeReturned(string queryValue, params string[] expectedObservationNames) + { + await SearchAndValidateObservations(queryValue, expectedObservationNames); + } + + private async Task SearchAndValidateObservations(string queryValue, string[] expectedObservationNames) + { + Bundle bundle = await SearchAsync(ResourceType.Observation, queryValue); + + Observation[] expected = expectedObservationNames.Select(name => _fixture.Observations[name]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + private async Task SearchAndValidateDocumentReferences(string queryValue, string[] expectedDocumentReferenceNames) + { + Bundle bundle = await SearchAsync(ResourceType.DocumentReference, queryValue); + + DocumentReference[] expected = expectedDocumentReferenceNames.Select(name => _fixture.DocumentReferences[name]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + private async Task 
SearchAsync(ResourceType resourceType, string queryValue) + { + // Append the test session id. + return await _client.SearchAsync( + resourceType, + $"_tag={_fixture.FixtureTag}&{queryValue}"); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTestFixture.cs new file mode 100644 index 0000000000..1d8515aeda --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTestFixture.cs @@ -0,0 +1,46 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportDateSearchTestFixture : ImportTestFixture + { + public ImportDateSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + } + + public string FixtureTag { get; } = Guid.NewGuid().ToString(); + + public IReadOnlyList Observations { get; private set; } + + protected override async Task OnInitializedAsync() + { + Observations = await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + p => SetObservation(p, "1979-12-31"), // 1979-12-31T00:00:00.0000000 <-> 1979-12-31T23:59:59.9999999 + p => SetObservation(p, "1980"), // 1980-01-01T00:00:00.0000000 <-> 1980-12-31T23:59:59.9999999 + p => SetObservation(p, "1980-05"), // 1980-05-01T00:00:00.0000000 <-> 
1980-05-31T23:59:59.9999999 + p => SetObservation(p, "1980-05-11"), // 1980-05-11T00:00:00.0000000 <-> 1980-05-11T23:59:59.9999999 + p => SetObservation(p, "1980-05-11T16:32:15"), // 1980-05-11T16:32:15.0000000 <-> 1980-05-11T16:32:15.9999999 + p => SetObservation(p, "1980-05-11T16:32:15.500"), // 1980-05-11T16:32:15.5000000 <-> 1980-05-11T16:32:15.5000000 + p => SetObservation(p, "1981-01-01")); // 1981-01-01T00:00:00.0000000 <-> 1981-12-31T23:59:59.9999999 + + void SetObservation(Observation observation, string date) + { + observation.Status = ObservationStatus.Final; + observation.AddTestTag(FixtureTag); + observation.Effective = new FhirDateTime(date); + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTests.cs new file mode 100644 index 0000000000..44306adb9c --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportDateSearchTests.cs @@ -0,0 +1,162 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportDateSearchTests : IClassFixture + { + private readonly TestFhirClient _client; + private readonly ImportDateSearchTestFixture _fixture; + + public ImportDateSearchTests(ImportDateSearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + // http://hl7.org/fhir/search.html#prefix + // eq: the range of the search value has to fully contains the range of the target value. + // ne: the range of the search value does not fully contain the range of the target value. + // gt: the range above the search value intersects (i.e. overlaps) with the range of the target value. + // lt: the range below the search value intersects (i.e. overlaps) with the range of the target value. + // le: the range below the search value intersects (i.e. overlaps) with the range of the target value or the range of the search value fully contains the range of the target value. + // ge: the range above the search value intersects (i.e. overlaps) with the range of the target value, or the range of the search value fully contains the range of the target value. + // sa: the range of the search value does not overlap with the range of the target value, and the range above the search value contains the range of the target value. 
+ // eb: the range of the search value does not overlap with the range of the target value, and the range below the search value contains the range of the target value.
+ [InlineData("1980-05-11T16:32:30")] // Any dates with start time greater than or equal to 1980-05-11T16:32:30.0000000 and end time less than or equal to 1980-05-11T16:32:30.9999999. + [InlineData("ne1980", 0, 6)] // Any dates with start time less than 1980-01-01T00:00:00.0000000 or end time greater than 1980-12-31T23:59:59.9999999. + [InlineData("ne1980-01", 0, 1, 2, 3, 4, 5, 6)] // Any dates with start time less than 1980-01-01T00:00:00.0000000 or end time greater than 1980-01-31T23:59:59.9999999. + [InlineData("ne1980-05", 0, 1, 6)] // Any dates with start time less than 1980-05-01T00:00:00.0000000 or end time greater than 1980-05-31T23:59:59.9999999. + [InlineData("ne1980-05-10", 0, 1, 2, 3, 4, 5, 6)] // Any dates with start time less than 1980-05-10T00:00:00.0000000 or end time greater than 1980-05-10T23:59:59.9999999. + [InlineData("ne1980-05-11", 0, 1, 2, 6)] // Any dates with start time less than 1980-05-11T00:00:00.0000000 or end time greater than 1980-05-11T23:59:59.9999999. + [InlineData("ne1980-05-11T16:32:15", 0, 1, 2, 3, 6)] // Any dates with start time less than 1980-05-11T16:32:15.0000000 or end time greater than 1980-05-11T16:32:15.9999999. + [InlineData("ne1980-05-11T16:32:15.500", 0, 1, 2, 3, 4, 6)] // Any dates with start time less than 1980-05-11T16:32:15.5000000 or end time greater than 1980-05-11T16:32:15.5000000. + [InlineData("ne1980-05-11T16:32:15.5000000", 0, 1, 2, 3, 4, 6)] // Any dates with start time less than 1980-05-11T16:32:15.5000000 or end time greater than 1980-05-11T16:32:15.5000000. + [InlineData("ne1980-05-11T16:32:15.5000001", 0, 1, 2, 3, 4, 5, 6)] // Any dates with start time less than 1980-05-11T16:32:15.5000001 or end time greater than 1980-05-11T16:32:15.5000001. + [InlineData("ne1980-05-11T16:32:30", 0, 1, 2, 3, 4, 5, 6)] // Any dates with start time less than 1980-05-11T16:32:30.0000000 or end time greater than 1980-05-11T16:32:30.9999999. 
+ [InlineData("lt1980", 0)] // Only dates with start time earlier than 1980-01-01T00:00:00.0000000 would match. + [InlineData("lt1980-04", 0, 1)] // Only dates with start time earlier than 1980-04-01T00:00:00.0000000 would match. + [InlineData("lt1980-05", 0, 1)] // Only dates with start time earlier than 1980-05-01T00:00:00.0000000 would match. + [InlineData("lt1980-05-10", 0, 1, 2)] // Only dates with start time earlier than 1980-05-10T00:00:00.0000000 would match. + [InlineData("lt1980-05-11", 0, 1, 2)] // Only dates with start time earlier than 1980-05-11T00:00:00.0000000 would match. + [InlineData("lt1980-05-11T16:32:14", 0, 1, 2, 3)] // Only dates with start time earlier than 1980-05-11T16:32:14.0000000 would match. + [InlineData("lt1980-05-11T16:32:15", 0, 1, 2, 3)] // Only dates with start time earlier than 1980-05-11T16:32:15.0000000 would match. + [InlineData("lt1980-05-11T16:32:15.4999999", 0, 1, 2, 3, 4)] // Only dates with start time earlier than 1980-05-11T16:32:15.49999999 would match. + [InlineData("lt1980-05-11T16:32:15.500", 0, 1, 2, 3, 4)] // Only dates with start time earlier than 1980-05-11T16:32:15.5000000 would match. + [InlineData("lt1980-05-11T16:32:15.5000000", 0, 1, 2, 3, 4)] // Only dates with start time earlier than 1980-05-11T16:32:15.5000000 would match. + [InlineData("lt1980-05-11T16:32:15.5000001", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than 1980-05-11T16:32:15.5000001 would match. + [InlineData("lt1980-05-11T16:32:16", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than 1980-05-11T16:32:16.0000000 would match. + [InlineData("lt1980-05-12", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than 1980-05-12T00:00:00.0000000 would match. + [InlineData("lt1980-06", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than 1980-06-01T00:00:00.0000000 would match. + [InlineData("lt1981", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than 1981-01-01T00:00:00.0000000 would match. 
+ [InlineData("lt1981-01-01T00:00:00.0000001", 0, 1, 2, 3, 4, 5, 6)] // Only dates with start time earlier than 1981-01-01T00:00:00.0000001 would match. + [InlineData("gt1979-12-31T23:59:59.9999999", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than 1979-12-31T23:59:59.9999999 would match. + [InlineData("gt1980", 6)] // Only dates with end time later than 1980-12-31T23:59:59.9999999 would match. + [InlineData("gt1980-04", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than 1980-04-30T23:59:59.9999999 would match. + [InlineData("gt1980-05", 1, 6)] // Only dates with end time later than 1980-05-31T23:59:59.9999999 would match. + [InlineData("gt1980-05-11", 1, 2, 6)] // Only dates with end time later than 1980-05-11T23:59:59.9999999 would match. + [InlineData("gt1980-05-11T16:32:14", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than 1980-05-11T16:32:14.9999999 would match. + [InlineData("gt1980-05-11T16:32:15", 1, 2, 3, 6)] // Only dates with end time later than 1980-05-11T16:32:15.9999999 would match. + [InlineData("gt1980-05-11T16:32:15.4999999", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than 1980-05-11T16:32:15.4999999 would match. + [InlineData("gt1980-05-11T16:32:15.500", 1, 2, 3, 4, 6)] // Only dates with end time later than 1980-05-11T16:32:15.5000000 would match. + [InlineData("gt1980-05-11T16:32:15.5000000", 1, 2, 3, 4, 6)] // Only dates with end time later than 1980-05-11T16:32:15.5000000 would match. + [InlineData("gt1980-05-11T16:32:15.5000001", 1, 2, 3, 4, 6)] // Only dates with end time later than 1980-05-11T16:32:15.5000001 would match. + [InlineData("gt1980-05-11T16:32:16", 1, 2, 3, 6)] // Only dates with end time later than 1980-05-11T16:32:16.9999999 would match. + [InlineData("gt1980-05-12", 1, 2, 6)] // Only dates with end time later than 1980-05-12T23:59:59.9999999 would match. + [InlineData("gt1980-06", 1, 6)] // Only dates with end time later than 1980-06-01T23:59:59.9999999 would match. 
+ [InlineData("gt1981-01-01T00:00:00.0000001", 6)] // Only dates with end time later than 1981-01-01T00:00:00.0000001 would match. + [InlineData("le1980", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-12-31T23:59:59.9999999 would match. + [InlineData("le1980-04", 0, 1)] // Only dates with start time earlier than or equal to 1980-04-30T23:59:59.9999999 would match. + [InlineData("le1980-05", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-31T23:59:59.9999999 would match. + [InlineData("le1980-05-10", 0, 1, 2)] // Only dates with start time earlier than or equal to 1980-05-10T23:59:59.9999999 would match. + [InlineData("le1980-05-11", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-11T23:59:59.9999999 would match. + [InlineData("le1980-05-11T16:32:14", 0, 1, 2, 3)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:14.9999999 would match. + [InlineData("le1980-05-11T16:32:15", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:15.9999999 would match. + [InlineData("le1980-05-11T16:32:15.4999999", 0, 1, 2, 3, 4)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:15.49999999 would match. + [InlineData("le1980-05-11T16:32:15.500", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:15.5000000 would match. + [InlineData("le1980-05-11T16:32:15.5000000", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:15.5000000 would match. + [InlineData("le1980-05-11T16:32:15.5000001", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:15.5000001 would match. + [InlineData("le1980-05-11T16:32:16", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-11T16:32:16.9999999 would match. 
+ [InlineData("le1980-05-12", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-05-12T23:59:59.9999999 would match. + [InlineData("le1980-06", 0, 1, 2, 3, 4, 5)] // Only dates with start time earlier than or equal to 1980-06-30T23:59:59.9999999 would match. + [InlineData("le1981", 0, 1, 2, 3, 4, 5, 6)] // Only dates with start time earlier than or equal to 1981-12-31T23:59:59.9999999 would match. + [InlineData("le1981-01-01T00:00:00.0000001", 0, 1, 2, 3, 4, 5, 6)] // Only dates with start time earlier than or equal to 1981-01-01T00:00:00.0000001 would match. + [InlineData("ge1979-12-31T23:59:59.9999999", 0, 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1979-12-31T23:59:59.9999999 would match. + [InlineData("ge1980", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-01-01T00:00:00.0000000 would match. + [InlineData("ge1980-04", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-04-01T00:00:00.0000000 would match. + [InlineData("ge1980-05", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-01T00:00:00.0000000 would match. + [InlineData("ge1980-05-11", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-11T00:00:00.0000000 would match. + [InlineData("ge1980-05-11T16:32:14", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-11T16:32:14.0000000 would match. + [InlineData("ge1980-05-11T16:32:15", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-11T16:32:15.0000000 would match. + [InlineData("ge1980-05-11T16:32:15.4999999", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-11T16:32:15.4999999 would match. + [InlineData("ge1980-05-11T16:32:15.500", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-11T16:32:15.5000000 would match. 
+ [InlineData("ge1980-05-11T16:32:15.5000000", 1, 2, 3, 4, 5, 6)] // Only dates with end time later than or equal to 1980-05-11T16:32:15.5000000 would match. + [InlineData("ge1980-05-11T16:32:15.5000001", 1, 2, 3, 4, 6)] // Only dates with end time later than or equal to 1980-05-11T16:32:15.5000001 would match. + [InlineData("ge1980-05-11T16:32:16", 1, 2, 3, 6)] // Only dates with end time later than 1980-05-11T16:32:16.0000000 would match. + [InlineData("ge1980-05-12", 1, 2, 6)] // Only dates with end time later than or equal to 1980-05-12T00:00:00.0000000 would match. + [InlineData("ge1980-06", 1, 6)] // Only dates with end time later than or equal to 1980-06-01T00:00:00.0000000 would match. + [InlineData("ge1981-01-01T00:00:00.0000001", 6)] // Only dates with end time later than or equal to 1981-01-01T00:00:00.0000001 would match. + [InlineData("sa1980", 6)] // Only dates with start time later than 1981-12-31T23:59:59.9999999 would match. + [InlineData("sa1980-04", 2, 3, 4, 5, 6)] // Only dates with start time later than 1980-04-30T23:59:59.9999999 would match. + [InlineData("sa1980-05", 6)] // Only dates with start time later than 1980-05-31T23:59:59.9999999 would match. + [InlineData("sa1980-05-10", 3, 4, 5, 6)] // Only dates with start time later than 1980-05-10T23:59:59.9999999 would match. + [InlineData("sa1980-05-11", 6)] // Only dates with start time later than 1980-05-11T23:59:59.9999999 would match. + [InlineData("sa1980-05-11T16:32:14", 4, 5, 6)] // Only dates with start time later than 1980-05-11T16:32:14.9999999 would match. + [InlineData("sa1980-05-11T16:32:15", 6)] // Only dates with start time later than 1980-05-11T16:32:15.9999999 would match. + [InlineData("sa1980-05-11T16:32:15.4999999", 5, 6)] // Only dates with start time later than 1980-05-11T16:32:15.49999999 would match. + [InlineData("sa1980-05-11T16:32:15.500", 6)] // Only dates with start time later than 1980-05-11T16:32:15.5000000 would match. 
+ [InlineData("sa1980-05-11T16:32:15.5000000", 6)] // Only dates with start time later than 1980-05-11T16:32:15.5000000 would match. + [InlineData("sa1980-05-11T16:32:15.5000001", 6)] // Only dates with start time later than 1980-05-11T16:32:15.5000001 would match. + [InlineData("sa1980-05-11T16:32:16", 6)] // Only dates with start time later than 1980-05-11T16:32:16.9999999 would match. + [InlineData("sa1980-05-12", 6)] // Only dates with start time later than 1980-05-12T23:59:59.9999999 would match. + [InlineData("sa1980-06", 6)] // Only dates with start time later than 1980-06-30T23:59:59.9999999 would match. + [InlineData("sa1981")] // Only dates with start time later than 1981-12-31T23:59:59.9999999 would match. + [InlineData("sa1981-01-01T00:00:00.0000001")] // Only dates with start time later than 1981-01-01T00:00:00.0000001 would match. + [InlineData("eb1979-12-31T23:59:59.9999999")] // Only dates with end time earlier than 1979-12-31T23:59:59.9999999 would match. + [InlineData("eb1980", 0)] // Only dates with end time earlier than 1980-01-01T00:00:00.0000000 would match. + [InlineData("eb1980-04", 0)] // Only dates with end time earlier than 1980-04-01T00:00:00.0000000 would match. + [InlineData("eb1980-05", 0)] // Only dates with end time earlier than 1980-05-01T00:00:00.0000000 would match. + [InlineData("eb1980-05-11", 0)] // Only dates with end time earlier than 1980-05-11T00:00:00.0000000 would match. + [InlineData("eb1980-05-11T16:32:14", 0)] // Only dates with end time earlier than 1980-05-11T16:32:14.0000000 would match. + [InlineData("eb1980-05-11T16:32:15", 0)] // Only dates with end time earlier than 1980-05-11T16:32:15.0000000 would match. + [InlineData("eb1980-05-11T16:32:15.4999999", 0)] // Only dates with end time earlier than 1980-05-11T16:32:15.4999999 would match. + [InlineData("eb1980-05-11T16:32:15.500", 0)] // Only dates with end time earlier than 1980-05-11T16:32:15.5000000 would match. 
+ [InlineData("eb1980-05-11T16:32:15.5000000", 0)] // Only dates with end time earlier than 1980-05-11T16:32:15.5000000 would match. + [InlineData("eb1980-05-11T16:32:15.5000001", 0, 5)] // Only dates with end time earlier than 1980-05-11T16:32:15.5000001 would match. + [InlineData("eb1980-05-11T16:32:16", 0, 4, 5)] // Only dates with end time later than 1980-05-11T16:32:16.0000000 would match. + [InlineData("eb1980-05-12", 0, 3, 4, 5)] // Only dates with end time earlier than 1980-05-12T00:00:00.0000000 would match. + [InlineData("eb1980-06", 0, 2, 3, 4, 5)] // Only dates with end time earlier than 1980-06-01T00:00:00.0000000 would match. + [InlineData("eb1981-01-01T00:00:00.0000001", 0, 1, 2, 3, 4, 5)] // Only dates with end time earlier than 1981-01-01T00:00:00.0000001 would match. + public async Task GivenADateTimeSearchParam_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] expectedIndices) + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, $"date={queryValue}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = expectedIndices.Select(i => _fixture.Observations[i]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTestFixture.cs new file mode 100644 index 0000000000..1ac90b5cb2 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTestFixture.cs @@ -0,0 +1,48 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportNumberSearchTestFixture : ImportTestFixture + { + public ImportNumberSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + } + + public IReadOnlyList RiskAssessments { get; private set; } + + public string FixtureTag { get; } = Guid.NewGuid().ToString(); + + protected override async Task OnInitializedAsync() + { + RiskAssessments = await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + i => SetRiskAssessment(i, 1), + i => SetRiskAssessment(i, 4), + i => SetRiskAssessment(i, 5), + i => SetRiskAssessment(i, 6), + i => SetRiskAssessment(i, 100)); + + void SetRiskAssessment(RiskAssessment riskAssessment, int probability) + { + riskAssessment.Status = ObservationStatus.Final; + riskAssessment.Subject = new ResourceReference("Patient/123"); + riskAssessment.AddTestTag(FixtureTag); + riskAssessment.Prediction = new List + { + new RiskAssessment.PredictionComponent { Probability = new FhirDecimal(probability) }, + }; + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTests.cs new file mode 100644 index 0000000000..5be1fe2ce2 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportNumberSearchTests.cs @@ -0,0 +1,68 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportNumberSearchTests : IClassFixture + { + private readonly TestFhirClient _client; + private readonly ImportNumberSearchTestFixture _fixture; + + public ImportNumberSearchTests(ImportNumberSearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Theory] + [InlineData("3")] + [InlineData("5", 2)] + [InlineData("5.000", 2)] + [InlineData("eq3")] + [InlineData("eq5", 2)] + [InlineData("eq5.000", 2)] + [InlineData("ne5", 0, 1, 3, 4)] + [InlineData("ne5.000", 0, 1, 3, 4)] + [InlineData("lt4.9", 0, 1)] + [InlineData("lt5", 0, 1)] + [InlineData("lt5.000", 0, 1)] + [InlineData("lt5.01", 0, 1, 2)] + [InlineData("gt4.9", 2, 3, 4)] + [InlineData("gt5", 3, 4)] + [InlineData("gt5.000", 3, 4)] + [InlineData("gt5.1", 3, 4)] + [InlineData("le4.9", 0, 1)] + [InlineData("le5", 0, 1, 2)] + [InlineData("le5.000", 0, 1, 2)] + [InlineData("le5.0001", 0, 1, 2)] + [InlineData("ge4.9999", 2, 3, 4)] + [InlineData("ge5", 2, 3, 4)] + [InlineData("ge5.000", 2, 3, 4)] + [InlineData("ge5.001", 3, 4)] + [InlineData("sa4.9999", 2, 3, 4)] + [InlineData("sa5", 3, 4)] + [InlineData("eb5", 0, 1)] + [InlineData("eb5.0001", 0, 1, 2)] + public async Task GivenANumberSearchParam_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] expectedIndices) + { + Bundle bundle = await 
_client.SearchAsync(ResourceType.RiskAssessment, $"probability={queryValue}&_tag={_fixture.FixtureTag}"); + + RiskAssessment[] expected = expectedIndices.Select(i => _fixture.RiskAssessments[i]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTestFixture.cs new file mode 100644 index 0000000000..b4078ec539 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTestFixture.cs @@ -0,0 +1,49 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportQuantitySearchTestFixture : ImportTestFixture + { + public ImportQuantitySearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + } + + public IReadOnlyList Observations { get; private set; } + + public string FixtureTag { get; } = Guid.NewGuid().ToString(); + + protected override async Task OnInitializedAsync() + { + Observations = await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + o => SetObservation(o, 0.002m, "unit1", "system1"), + o => SetObservation(o, 1.0m, "unit1", "system1"), + o => SetObservation(o, 3.12m, "unit1", "system2"), + o => SetObservation(o, 4.0m, "unit1", "system1"), + 
o => SetObservation(o, 5.0m, "unit1", "system1"), + o => SetObservation(o, 5.0m, "unit2", "system2"), + o => SetObservation(o, 6.0m, "unit2", "system2"), + o => SetObservation(o, 8.95m, "unit2", "system1"), + o => SetObservation(o, 10.0m, "unit1", "system1")); + + void SetObservation(Observation observation, decimal quantity, string unit, string system) + { + observation.Code = new CodeableConcept("system", "code"); + observation.Status = ObservationStatus.Registered; + observation.AddTestTag(FixtureTag); + observation.Value = new Quantity(quantity, unit, system); + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTests.cs new file mode 100644 index 0000000000..87970a428b --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportQuantitySearchTests.cs @@ -0,0 +1,173 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportQuantitySearchTests : IClassFixture + { + private readonly TestFhirClient _client; + private readonly ImportQuantitySearchTestFixture _fixture; + + public ImportQuantitySearchTests(ImportQuantitySearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Theory] + [InlineData("30")] + [InlineData("0.002", 0)] + [InlineData("2e-3", 0)] + [InlineData("2E-3", 0)] + [InlineData("5", 4, 5)] + [InlineData("5.000", 4, 5)] + [InlineData("eq30")] + [InlineData("eq2e-3", 0)] + [InlineData("eq2E-3", 0)] + [InlineData("eq5", 4, 5)] + [InlineData("eq5.000", 4, 5)] + [InlineData("ne5", 0, 1, 2, 3, 6, 7, 8)] + [InlineData("ne5.000", 0, 1, 2, 3, 6, 7, 8)] + [InlineData("lt4.9", 0, 1, 2, 3)] + [InlineData("lt5", 0, 1, 2, 3)] + [InlineData("lt5.000", 0, 1, 2, 3)] + [InlineData("lt5.01", 0, 1, 2, 3, 4, 5)] + [InlineData("gt4.9", 4, 5, 6, 7, 8)] + [InlineData("gt5", 6, 7, 8)] + [InlineData("gt5.000", 6, 7, 8)] + [InlineData("gt5.1", 6, 7, 8)] + [InlineData("le4.9", 0, 1, 2, 3)] + [InlineData("le5", 0, 1, 2, 3, 4, 5)] + [InlineData("le5.000", 0, 1, 2, 3, 4, 5)] + [InlineData("le5.0001", 0, 1, 2, 3, 4, 5)] + [InlineData("ge4.9999", 4, 5, 6, 7, 8)] + [InlineData("ge5", 4, 5, 6, 7, 8)] + [InlineData("ge5.000", 4, 5, 6, 7, 8)] + [InlineData("ge5.001", 6, 7, 8)] + [InlineData("sa4.9999", 4, 5, 6, 7, 8)] + [InlineData("sa5", 6, 7, 8)] + [InlineData("eb5", 0, 1, 2, 3)] + 
[InlineData("eb5.0001", 0, 1, 2, 3, 4, 5)] + [InlineData("30|system1")] + [InlineData("5|system1", 4)] + [InlineData("5.000|system1", 4)] + [InlineData("eq30|system1")] + [InlineData("eq5|system1", 4)] + [InlineData("eq5.000|system1", 4)] + [InlineData("ne2e-3", 1, 2, 3, 4, 5, 6, 7, 8)] + [InlineData("ne5|system1", 0, 1, 3, 7, 8)] + [InlineData("ne5.000|system1", 0, 1, 3, 7, 8)] + [InlineData("lt3e-3|system1", 0)] + [InlineData("lt4E-2", 0)] + [InlineData("lt4.9|system1", 0, 1, 3)] + [InlineData("lt5|system1", 0, 1, 3)] + [InlineData("lt5.000|system1", 0, 1, 3)] + [InlineData("lt5.01|system1", 0, 1, 3, 4)] + [InlineData("gt2e-3|system1", 1, 3, 4, 7, 8)] + [InlineData("gt2e-3|system2", 2, 5, 6)] + [InlineData("gt4.9|system1", 4, 7, 8)] + [InlineData("gt5|system1", 7, 8)] + [InlineData("gt5.000|system1", 7, 8)] + [InlineData("gt5.1|system1", 7, 8)] + [InlineData("le1E-2|system1", 0)] + [InlineData("le1E-2|system2")] + [InlineData("le4.9|system1", 0, 1, 3)] + [InlineData("le5|system1", 0, 1, 3, 4)] + [InlineData("le5.000|system1", 0, 1, 3, 4)] + [InlineData("le5.0001|system1", 0, 1, 3, 4)] + [InlineData("ge4.9999|system1", 4, 7, 8)] + [InlineData("ge5|system1", 4, 7, 8)] + [InlineData("ge5.000|system1", 4, 7, 8)] + [InlineData("ge5.001|system1", 7, 8)] + [InlineData("sa4.9999|system1", 4, 7, 8)] + [InlineData("sa5|system1", 7, 8)] + [InlineData("eb2E-2|system1", 0)] + [InlineData("eb5|system1", 0, 1, 3)] + [InlineData("eb5.0001|system1", 0, 1, 3, 4)] + [InlineData("30||unit2")] + [InlineData("2e-3||unit1", 0)] + [InlineData("2.00e-3||unit1", 0)] + [InlineData("2e-3||unit2")] + [InlineData("5||unit2", 5)] + [InlineData("5.000||unit2", 5)] + [InlineData("eq30||unit2")] + [InlineData("eq5||unit2", 5)] + [InlineData("eq5.000||unit2", 5)] + [InlineData("ne5||unit2", 6, 7)] + [InlineData("ne5.000||unit2", 6, 7)] + [InlineData("lt4.9||unit2")] + [InlineData("lt5||unit2")] + [InlineData("lt5.000||unit2")] + [InlineData("lt5.01||unit2", 5)] + [InlineData("gt4.9||unit2", 5, 6, 
7)] + [InlineData("gt5||unit2", 6, 7)] + [InlineData("gt5.000||unit2", 6, 7)] + [InlineData("gt5.1||unit2", 6, 7)] + [InlineData("le4.9||unit2")] + [InlineData("le5||unit2", 5)] + [InlineData("le5.000||unit2", 5)] + [InlineData("le5.0001||unit2", 5)] + [InlineData("ge4.9999||unit2", 5, 6, 7)] + [InlineData("ge5||unit2", 5, 6, 7)] + [InlineData("ge5.000||unit2", 5, 6, 7)] + [InlineData("ge5.001||unit2", 6, 7)] + [InlineData("sa4.9999||unit2", 5, 6, 7)] + [InlineData("sa5||unit2", 6, 7)] + [InlineData("eb5||unit2")] + [InlineData("eb5.0001||unit2", 5)] + [InlineData("30|system1|unit2")] + [InlineData("5|system1|unit2")] + [InlineData("0.002|system1|unit1", 0)] + [InlineData("2e-3|system1|unit1", 0)] + [InlineData("2E-3|system1|unit1", 0)] + [InlineData("2e-3|system1|unit2")] + [InlineData("2e-3|system2|unit1")] + [InlineData("5.000|system1|unit2")] + [InlineData("eq30|system1|unit2")] + [InlineData("eq5|system1|unit2")] + [InlineData("eq5.000|system1|unit2")] + [InlineData("ne5|system1|unit2", 7)] + [InlineData("ne5.000|system1|unit2", 7)] + [InlineData("lt4.9|system1|unit2")] + [InlineData("lt5|system1|unit2")] + [InlineData("lt5.000|system1|unit2")] + [InlineData("lt5.01|system1|unit2")] + [InlineData("gt4.9|system1|unit2", 7)] + [InlineData("gt5|system1|unit2", 7)] + [InlineData("gt5.000|system1|unit2", 7)] + [InlineData("gt5.1|system1|unit2", 7)] + [InlineData("le4.9|system1|unit2")] + [InlineData("le5|system1|unit2")] + [InlineData("le5.000|system1|unit2")] + [InlineData("le5.0001|system1|unit2")] + [InlineData("ge4.9999|system1|unit2", 7)] + [InlineData("ge5|system1|unit2", 7)] + [InlineData("ge5.000|system1|unit2", 7)] + [InlineData("ge5.001|system1|unit2", 7)] + [InlineData("sa4.9999|system1|unit2", 7)] + [InlineData("sa5|system1|unit2", 7)] + [InlineData("eb5|system1|unit2")] + [InlineData("eb5.0001|system1|unit2")] + public async Task GivenAQuantitySearchParameterWithQuantity_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] 
namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import
{
    /// <summary>
    /// Fixture that imports a fixed set of tagged patients with various reference values
    /// (ManagingOrganization / GeneralPractitioner) so the reference-search tests can query them.
    /// NOTE(review): generic type arguments were stripped from the base class in the extracted
    /// source; confirm whether <c>ImportTestFixture</c> takes a startup type parameter.
    /// </summary>
    public class ImportReferenceSearchTestFixture : ImportTestFixture
    {
        public ImportReferenceSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory)
            : base(dataStore, format, testFhirServerFactory)
        {
        }

        // Unique tag per fixture instance so each test run only sees its own resources.
        public string FixtureTag { get; } = Guid.NewGuid().ToString();

        // The imported patients, indexed by the positions the tests reference.
        public IReadOnlyList<Patient> Patients { get; private set; }

        protected override async Task OnInitializedAsync()
        {
            Patients = await ImportTestHelper.ImportToServerAsync<Patient>(
                TestFhirClient,
                CloudStorageAccount,
                p => p.AddTestTag(FixtureTag).ManagingOrganization = new ResourceReference("Organization/123"),
                p => p.AddTestTag(FixtureTag).ManagingOrganization = new ResourceReference("Organization/abc"),
                p => p.AddTestTag(FixtureTag).ManagingOrganization = new ResourceReference("ijk"), // type not specified, but known constrained to be Organization
                p => p.AddTestTag(FixtureTag).GeneralPractitioner = new List<ResourceReference> { new ResourceReference("Practitioner/p1") },
                p => p.AddTestTag(FixtureTag).GeneralPractitioner = new List<ResourceReference> { new ResourceReference("p2") }); // type not specified and not known because it could be Practitioner, Organization, or PractitionerRole
        }
    }
}
namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import
{
    /// <summary>
    /// E2E tests for reference search parameters over data loaded via $import.
    /// Indices in the InlineData rows refer to the patients created by
    /// <see cref="ImportReferenceSearchTestFixture"/>.
    /// </summary>
    [Trait(Traits.Category, Categories.Import)]
    [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)]
    public class ImportReferenceSearchTests : IClassFixture<ImportReferenceSearchTestFixture>
    {
        private readonly TestFhirClient _client;
        private readonly ImportReferenceSearchTestFixture _fixture;

        public ImportReferenceSearchTests(ImportReferenceSearchTestFixture fixture)
        {
            _client = fixture.TestFhirClient;
            _fixture = fixture;
        }

        [Theory]
        [InlineData("organization=Organization/123", 0)]
        [InlineData("organization=123", 0)]
        [InlineData("organization=Organization/1")]
        [InlineData("organization=organization/123")]
        [InlineData("organization=Organization/ijk", 2)] // This is specified in the resource as "ijk", without the type, but the type can only be Organization
        [InlineData("organization=ijk", 2)]
        [InlineData("general-practitioner=Practitioner/p1", 3)]
        [InlineData("general-practitioner:Practitioner=Practitioner/p1", 3)]
        [InlineData("general-practitioner:Practitioner=p1", 3)]
        [InlineData("general-practitioner=Practitioner/p2")] // This is specified in the resource as "p2", without the type, but because the parameter can reference several types and we don't resolve references, this search does not succeed
        [InlineData("general-practitioner:Practitioner=p2")] // This is specified in the resource as "p2", without the type, but because the parameter can reference several types and we don't resolve references, this search does not succeed
        [InlineData("general-practitioner=p2", 4)]
        public async Task GivenAReferenceSearchParam_WhenSearched_ThenCorrectBundleShouldBeReturned(string query, params int[] matchIndices)
        {
            // Scope the search to this fixture's data via the unique tag.
            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, $"{query}&_tag={_fixture.FixtureTag}");

            Patient[] expected = matchIndices.Select(i => _fixture.Patients[i]).ToArray();

            ImportTestHelper.VerifyBundle(bundle, expected);
        }
    }
}
namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import
{
    /// <summary>
    /// Fixture that imports tagged patients with various city/name values (including a
    /// value longer than typical index limits and accented names) for string-search tests.
    /// NOTE(review): generic type arguments were stripped from the base class in the
    /// extracted source; confirm whether <c>ImportTestFixture</c> takes a startup type parameter.
    /// </summary>
    public class ImportStringSearchTestFixture : ImportTestFixture
    {
        // Deliberately long value to exercise search over strings exceeding normal index lengths.
        internal const string LongString = "Lorem ipsum dolor sit amet consectetur adipiscing elit. Ut eget ultricies justo. Maecenas bibendum convallis sodales. Vestibulum quis molestie dui. Nulla porta elementum tristique. Aenean neque libero convallis sit amet dui ullamcorper congue lacinia erat. Sed finibus ex ac massa tincidunt tristique. In sed auctor massa. Proin cursus porttitor arcu. Maecenas a leo nunc. Sed pretium porta volutpat. In aliquet tempor sapien vitae laoreet nisl tempor ac. Vestibulum lacus leo luctus vitae pharetra at tempus ac diam. Integer at dui eu dolor gravida vehicula. Phasellus malesuada elit orci quis maximus purus consectetur ac. In semper consequat augue sit amet ultricies.";

        public ImportStringSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory)
            : base(dataStore, format, testFhirServerFactory)
        {
        }

        // The imported patients, indexed by the positions the tests reference.
        public IReadOnlyList<Patient> Patients { get; private set; }

        // Unique tag per fixture instance so each test run only sees its own resources.
        public string FixtureTag { get; } = Guid.NewGuid().ToString();

        protected override async Task OnInitializedAsync()
        {
            Patients = await ImportTestHelper.ImportToServerAsync<Patient>(
                TestFhirClient,
                CloudStorageAccount,
                p => SetPatientInfo(p, "Seattle", "Smith", given: "Bea"),
                p => SetPatientInfo(p, "Portland", "Williams"),
                p => SetPatientInfo(p, "Vancouver", "Anderson"),
                p => SetPatientInfo(p, LongString, "Murphy"),
                p => SetPatientInfo(p, "Montreal", "Richard", given: "Bea"),
                p => SetPatientInfo(p, "New York", "Muller"),
                p => SetPatientInfo(p, "Portland", "Müller"),
                p => SetPatientInfo(p, "Moscow", "Richard,Muller"));

            void SetPatientInfo(Patient patient, string city, string family, string given = null)
            {
                patient.Address = new List<Address>()
                {
                    new Address { City = city },
                };

                // NOTE(review): when 'given' is null this stores a single null element in
                // Given, matching the original behavior — confirm that is intended.
                patient.Name = new List<HumanName>()
                {
                    new HumanName { Family = family, Given = new[] { given } },
                };

                patient.AddTestTag(FixtureTag);
            }
        }
    }
}
namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import
{
    /// <summary>
    /// E2E tests for string search parameters (default, :exact, :contains modifiers,
    /// long values, multi-value and escaped queries) over data loaded via $import.
    /// Indices refer to the patients created by <see cref="ImportStringSearchTestFixture"/>.
    /// </summary>
    [Trait(Traits.Category, Categories.Import)]
    [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)]
    public class ImportStringSearchTests : IClassFixture<ImportStringSearchTestFixture>
    {
        private readonly TestFhirClient _client;
        private readonly ImportStringSearchTestFixture _fixture;

        public ImportStringSearchTests(ImportStringSearchTestFixture fixture)
        {
            _client = fixture.TestFhirClient;
            _fixture = fixture;
        }

        [Theory]
        [Trait(Traits.Priority, Priority.One)]
        [InlineData("", "seattle", true)]
        [InlineData("", "SEATTLE", true)]
        [InlineData("", "Seattle", true)]
        [InlineData("", "Sea", true)]
        [InlineData("", "sea", true)]
        [InlineData("", "123", false)]
        [InlineData(":exact", "Seattle", true)]
        [InlineData(":exact", "seattle", false)]
        [InlineData(":exact", "SEATTLE", false)]
        [InlineData(":exact", "Sea", false)]
        [InlineData(":contains", "att", true)]
        [InlineData(":contains", "EAT", true)]
        [InlineData(":contains", "123", false)]
        public async Task GivenAStringSearchParam_WhenSearched_ThenCorrectBundleShouldBeReturned(string modifier, string valueToSearch, bool shouldMatch)
        {
            string query = string.Format("address-city{0}={1}&_tag={2}", modifier, valueToSearch, _fixture.FixtureTag);

            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, query);

            Assert.NotNull(bundle);

            // Patient 0 is the only one with city "Seattle".
            Patient expectedPatient = _fixture.Patients[0];

            if (shouldMatch)
            {
                Assert.NotEmpty(bundle.Entry);
                ImportTestHelper.VerifyBundle(bundle, expectedPatient);
            }
            else
            {
                Assert.Empty(bundle.Entry);
            }
        }

        [Theory]
        [Trait(Traits.Priority, Priority.One)]
        [InlineData("", "Lorem", true)]
        [InlineData("", "NotLorem", false)]
        [InlineData("", ImportStringSearchTestFixture.LongString, true)]
        [InlineData("", "Not" + ImportStringSearchTestFixture.LongString, false)]
        [InlineData(":exact", ImportStringSearchTestFixture.LongString, true)]
        [InlineData(":exact", ImportStringSearchTestFixture.LongString + "Not", false)]
        [InlineData(":contains", ImportStringSearchTestFixture.LongString, true)]
        [InlineData(":contains", ImportStringSearchTestFixture.LongString + "Not", false)]
        [InlineData(":contains", "Vestibulum", true)]
        [InlineData(":contains", "NotInString", false)]
        public async Task GivenAStringSearchParamAndAResourceWithALongSearchParamValue_WhenSearched_ThenCorrectBundleShouldBeReturned(string modifier, string valueToSearch, bool shouldMatch)
        {
            string query = string.Format("address-city{0}={1}&_tag={2}", modifier, valueToSearch, _fixture.FixtureTag);

            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, query);

            Assert.NotNull(bundle);

            // Patient 3 is the one whose city is the long string.
            Patient expectedPatient = _fixture.Patients[3];

            if (shouldMatch)
            {
                Assert.NotEmpty(bundle.Entry);
                ImportTestHelper.VerifyBundle(bundle, expectedPatient);
            }
            else
            {
                Assert.Empty(bundle.Entry);
            }
        }

        [Fact]
        public async Task GivenAStringSearchParamWithMultipleValues_WhenSearched_ThenCorrectBundleShouldBeReturned()
        {
            // Comma acts as OR: matches family "Smith" and the prefix "Ander" (Anderson).
            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, $"family=Smith,Ander&_tag={_fixture.FixtureTag}");

            ImportTestHelper.VerifyBundle(bundle, _fixture.Patients[0], _fixture.Patients[2]);
        }

        [Fact]
        public async Task GivenAStringSearchParamThatCoversSeveralFields_WhenSpecifiedTwiceInASearch_IntersectsTheTwoResultsProperly()
        {
            // Repeating a parameter acts as AND: only patient 0 has both "Bea" and "Smith" in name fields.
            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, $"name=Bea&name=Smith&_tag={_fixture.FixtureTag}");

            ImportTestHelper.VerifyBundle(bundle, _fixture.Patients[0]);
        }

        [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)]
        [Theory]
        [Trait(Traits.Priority, Priority.One)]
        [InlineData("muller")]
        [InlineData("müller")]
        public async Task GivenAStringSearchParamWithAccentAndAResourceWithAccent_WhenSearched_ThenCorrectBundleShouldBeReturned(string searchText)
        {
            // Accent-insensitive match: both "Muller" and "Müller" patients are expected.
            string query = $"name={searchText}&_total=accurate&_tag={_fixture.FixtureTag}";

            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, query);

            Assert.NotNull(bundle);
            Assert.Equal(2, bundle.Total);
            Assert.NotEmpty(bundle.Entry);
        }

        [Fact]
        public async Task GivenAEscapedStringSearchParams_WhenSearched_ThenCorrectBundleShouldBeReturned()
        {
            // "\," escapes the comma so the value is the literal family "Richard,Muller" (patient 7).
            Bundle bundle = await _client.SearchAsync(ResourceType.Patient, $"name=Richard\\,Muller&_tag={_fixture.FixtureTag}");

            ImportTestHelper.VerifyBundle(bundle, _fixture.Patients[7]);
        }
    }
}
namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import
{
    /// <summary>
    /// Base fixture for $import E2E tests. In addition to the standard HTTP integration
    /// setup, it resolves the Azure Storage account used as the import integration store:
    /// the account named by the TestIntegrationStoreUri/TestIntegrationStoreKey environment
    /// variables when both are set, otherwise the local storage emulator.
    /// </summary>
    public class ImportTestFixture : HttpIntegrationTestFixture
    {
        private const string LocalIntegrationStoreConnectionString = "UseDevelopmentStorage=true";

        public ImportTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory)
            : base(dataStore, format, testFhirServerFactory)
        {
            CloudStorageAccount storageAccount;

            string integrationStoreFromEnvironmentVariable = Environment.GetEnvironmentVariable("TestIntegrationStoreUri");
            string integrationStoreKeyFromEnvironmentVariable = Environment.GetEnvironmentVariable("TestIntegrationStoreKey");
            if (!string.IsNullOrEmpty(integrationStoreFromEnvironmentVariable) && !string.IsNullOrEmpty(integrationStoreKeyFromEnvironmentVariable))
            {
                // The account name is the first label of the blob endpoint host,
                // e.g. "myaccount" in https://myaccount.blob.core.windows.net.
                Uri integrationStoreUri = new Uri(integrationStoreFromEnvironmentVariable);
                string storageAccountName = integrationStoreUri.Host.Split('.')[0];
                StorageCredentials storageCredentials = new StorageCredentials(storageAccountName, integrationStoreKeyFromEnvironmentVariable);
                storageAccount = new CloudStorageAccount(storageCredentials, useHttps: true);
            }
            else
            {
                // TryParse on the emulator connection string; failure leaves storageAccount null
                // and is reported below.
                CloudStorageAccount.TryParse(LocalIntegrationStoreConnectionString, out storageAccount);
            }

            if (storageAccount == null)
            {
                // Specific exception type instead of bare Exception; message is unchanged in content.
                throw new InvalidOperationException(string.Format("Unable to create a cloud storage account. {0}", integrationStoreFromEnvironmentVariable ?? string.Empty));
            }

            CloudStorageAccount = storageAccount;
        }

        // Storage account backing the import integration store.
        public CloudStorageAccount CloudStorageAccount { get; private set; }

        // NOTE(review): never assigned anywhere in this fixture — always null. Kept for
        // interface compatibility; candidate for removal.
        public string IntegrationStoreConnectionString { get; private set; }
    }
}
namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import
{
    /// <summary>
    /// Helpers shared by the $import E2E tests: uploading NDJSON payloads to blob storage,
    /// kicking off import tasks, polling them to completion, and verifying search results.
    /// </summary>
    public static class ImportTestHelper
    {
        private static readonly FhirJsonSerializer _fhirJsonSerializer = new FhirJsonSerializer();

        /// <summary>
        /// Uploads <paramref name="content"/> to a new blob in a freshly created container
        /// and returns the blob URI together with the ETag of the uploaded content.
        /// </summary>
        public static async Task<(Uri location, string etag)> UploadFileAsync(string content, CloudStorageAccount cloudAccount)
        {
            string blobName = Guid.NewGuid().ToString("N");
            string containerName = Guid.NewGuid().ToString("N");

            CloudBlobClient blobClient = cloudAccount.CreateCloudBlobClient();
            CloudBlobContainer container = blobClient.GetContainerReference(containerName);
            await container.CreateIfNotExistsAsync();

            CloudBlockBlob blob = container.GetBlockBlobReference(blobName);
            await blob.UploadTextAsync(content);

            // Fix: read the ETag from the uploaded blob (populated by UploadTextAsync) instead of
            // from a fresh, unfetched reference whose properties are not populated.
            return (blob.Uri, blob.Properties.ETag);
        }

        /// <summary>Downloads the blob at <paramref name="location"/> and returns its text content.</summary>
        public static async Task<string> DownloadFileAsync(string location, CloudStorageAccount cloudAccount)
        {
            CloudBlobClient blobClient = cloudAccount.CreateCloudBlobClient();

            // Async variant instead of the blocking GetBlobReferenceFromServer call.
            ICloudBlob blob = await blobClient.GetBlobReferenceFromServerAsync(new Uri(location));

            using MemoryStream stream = new MemoryStream();
            await blob.DownloadToStreamAsync(stream, CancellationToken.None);

            stream.Position = 0;
            using StreamReader reader = new StreamReader(stream);
            return await reader.ReadToEndAsync();
        }

        /// <summary>Runs <paramref name="query"/> and asserts the result contains exactly <paramref name="resources"/>.</summary>
        public static async Task VerifySearchResultAsync(TestFhirClient client, string query, params Resource[] resources)
        {
            Bundle result = await client.SearchAsync(query);
            VerifyBundle(result, resources);
        }

        /// <summary>Asserts the bundle contains exactly the given resources, matched by id.</summary>
        public static void VerifyBundle(Bundle result, params Resource[] resources)
        {
            Assert.Equal(resources.Length, result.Entry.Count);

            foreach (Resource resultResource in result.Entry.Select(e => e.Resource))
            {
                Assert.Contains(resources, expectedResource => expectedResource.Id.Equals(resultResource.Id));
            }
        }

        /// <summary>
        /// Creates one resource per customizer (assigning each a fresh id), imports them all,
        /// and returns the created resources in customizer order.
        /// </summary>
        public static async Task<TResource[]> ImportToServerAsync<TResource>(TestFhirClient testFhirClient, CloudStorageAccount cloudStorageAccount, params Action<TResource>[] resourceCustomizer)
            where TResource : Resource, new()
        {
            TResource[] resources = new TResource[resourceCustomizer.Length];

            for (int i = 0; i < resources.Length; i++)
            {
                TResource resource = new TResource();

                resourceCustomizer[i](resource);
                resource.Id = Guid.NewGuid().ToString("N");
                resources[i] = resource;
            }

            await ImportToServerAsync(testFhirClient, cloudStorageAccount, resources);

            return resources;
        }

        /// <summary>
        /// Serializes the resources to NDJSON grouped by resource type, uploads one blob per
        /// type, and runs an import to completion.
        /// </summary>
        public static async Task ImportToServerAsync(TestFhirClient testFhirClient, CloudStorageAccount cloudStorageAccount, params Resource[] resources)
        {
            var contentBuilders = new Dictionary<string, StringBuilder>();

            foreach (Resource resource in resources)
            {
                // TypeName is already a string; no conversion needed.
                string resourceType = resource.TypeName;
                if (!contentBuilders.TryGetValue(resourceType, out StringBuilder builder))
                {
                    builder = new StringBuilder();
                    contentBuilders[resourceType] = builder;
                }

                builder.AppendLine(_fhirJsonSerializer.SerializeToString(resource));
            }

            var inputFiles = new List<InputResource>();
            foreach ((string key, StringBuilder builder) in contentBuilders)
            {
                (Uri location, string etag) = await UploadFileAsync(builder.ToString(), cloudStorageAccount);
                inputFiles.Add(new InputResource()
                {
                    Etag = etag,
                    Url = location,
                    Type = key,
                });
            }

            var request = new ImportRequest()
            {
                InputFormat = "application/fhir+ndjson",
                InputSource = new Uri("https://other-server.example.org"),
                StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" },
                Input = inputFiles,
            };

            await ImportCheckAsync(testFhirClient, request);
        }

        /// <summary>
        /// Starts an import task and returns its status-check location. A 409 Conflict means a
        /// previous initial-load import is still running, so the request is retried until the
        /// server accepts it.
        /// </summary>
        public static async Task<Uri> CreateImportTaskAsync(TestFhirClient testFhirClient, ImportRequest request)
        {
            while (true)
            {
                try
                {
                    request.Mode = ImportConstants.InitialLoadMode;
                    request.Force = true;
                    Uri checkLocation = await testFhirClient.ImportAsync(request.ToParameters());
                    return checkLocation;
                }
                catch (FhirException fhirException)
                {
                    if (!HttpStatusCode.Conflict.Equals(fhirException.StatusCode))
                    {
                        throw;
                    }

                    await Task.Delay(TimeSpan.FromSeconds(5));
                }
            }
        }

        /// <summary>Tags a resource with the fixture tag so searches can be scoped per test run.</summary>
        public static T AddTestTag<T>(this T input, string tag)
            where T : Resource
        {
            // Fix: don't clobber an existing Meta (and any tags already on it).
            input.Meta ??= new Meta();
            input.Meta.Tag.Add(new Coding("http://e2e-test", tag));

            return input;
        }

        // Starts the import and polls the status endpoint until it stops returning 202 Accepted.
        private static async Task ImportCheckAsync(TestFhirClient testFhirClient, ImportRequest request)
        {
            Uri checkLocation = await CreateImportTaskAsync(testFhirClient, request);

            while ((await testFhirClient.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted)
            {
                await Task.Delay(TimeSpan.FromSeconds(5));
            }
        }
    }
}
ForbiddenMessage = "Forbidden: Authorization failed."; + + private readonly TestFhirClient _client; + private readonly ImportTestFixture _fixture; + + public ImportTests(ImportTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Fact] + [Trait(Traits.Category, Categories.Authorization)] + public async Task GivenAUserWithImportPermissions_WhenImportData_TheServerShouldReturnSuccess() + { + TestFhirClient tempClient = _client.CreateClientForUser(TestUsers.BulkImportUser, TestApplications.NativeClient); + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Type = "Patient", + }, + }, + }; + + await ImportCheckAsync(request, tempClient); + } + + [Fact] + [Trait(Traits.Category, Categories.Authorization)] + public async Task GivenAUserWithoutImportPermissions_WhenImportData_ThenServerShouldReturnForbidden() + { + TestFhirClient tempClient = _client.CreateClientForUser(TestUsers.ReadOnlyUser, TestApplications.NativeClient); + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = 
location, + Type = "Patient", + }, + }, + }; + + request.Mode = ImportConstants.InitialLoadMode; + request.Force = true; + FhirException fhirException = await Assert.ThrowsAsync(async () => await tempClient.ImportAsync(request.ToParameters(), CancellationToken.None)); + Assert.Equal(ForbiddenMessage, fhirException.Message); + Assert.Equal(HttpStatusCode.Forbidden, fhirException.StatusCode); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportOperationTriggered_ThenDataShouldBeImported() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = etag, + Type = "Patient", + }, + }, + }; + + await ImportCheckAsync(request); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportOperationTriggeredBeforePreviousTaskCompleted_ThenConflictShouldBeReturned() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = etag, + Type = "Patient", + 
}, + }, + }; + + request.Mode = ImportConstants.InitialLoadMode; + request.Force = true; + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + FhirException fhirException = await Assert.ThrowsAsync(async () => await _client.ImportAsync(request.ToParameters(), CancellationToken.None)); + Assert.Equal(HttpStatusCode.Conflict, fhirException.StatusCode); + + HttpResponseMessage response; + while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + { + await Task.Delay(TimeSpan.FromSeconds(5)); + } + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportOperationTriggeredWithoutEtag_ThenDataShouldBeImported() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string _) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Type = "Patient", + }, + }, + }; + + await ImportCheckAsync(request); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportResourceWithWrongType_ThenErrorLogShouldBeUploaded() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new 
Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = etag, + Type = "Observation", // not match the resource + }, + }, + }; + + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + + HttpResponseMessage response; + while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + { + await Task.Delay(TimeSpan.FromSeconds(5)); + } + + Assert.Equal(System.Net.HttpStatusCode.OK, response.StatusCode); + ImportTaskResult result = JsonConvert.DeserializeObject(await response.Content.ReadAsStringAsync()); + Assert.Single(result.Error); + Assert.NotEmpty(result.Error.First().Url); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportOperationTriggeredWithMultipleFiles_ThenDataShouldBeImported() + { + string patientNdJsonResource = Samples.GetNdJson("Import-SinglePatientTemplate"); + string resourceId1 = Guid.NewGuid().ToString("N"); + string patientNdJsonResource1 = patientNdJsonResource.Replace("##PatientID##", resourceId1); + string resourceId2 = Guid.NewGuid().ToString("N"); + string patientNdJsonResource2 = patientNdJsonResource.Replace("##PatientID##", resourceId2); + + (Uri location1, string _) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource1, _fixture.CloudStorageAccount); + (Uri location2, string _) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource2, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location1, + Type = "Patient", + }, + new InputResource() + { + Url = location2, + Type = "Patient", + }, + }, + }; + 
+ await ImportCheckAsync(request); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportInvalidResource_ThenErrorLogsShouldBeOutput() + { + string patientNdJsonResource = Samples.GetNdJson("Import-InvalidPatient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = etag, + Type = "Patient", + }, + }, + }; + + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + + HttpResponseMessage response; + while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + { + await Task.Delay(TimeSpan.FromSeconds(5)); + } + + Assert.Equal(System.Net.HttpStatusCode.OK, response.StatusCode); + ImportTaskResult result = JsonConvert.DeserializeObject(await response.Content.ReadAsStringAsync()); + Assert.NotEmpty(result.Output); + Assert.Equal(1, result.Error.Count); + Assert.NotEmpty(result.Request); + + string errorLoation = result.Error.ToArray()[0].Url; + string[] errorContents = (await ImportTestHelper.DownloadFileAsync(errorLoation, _fixture.CloudStorageAccount)).Split("\r\n", StringSplitOptions.RemoveEmptyEntries); + Assert.Single(errorContents); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportDuplicatedResource_ThenDupResourceShouldBeCleaned() + { + string patientNdJsonResource = Samples.GetNdJson("Import-DupPatientTemplate"); + string resourceId = Guid.NewGuid().ToString("N"); + patientNdJsonResource = 
patientNdJsonResource.Replace("##PatientID##", resourceId); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = etag, + Type = "Patient", + }, + }, + }; + + await ImportCheckAsync(request, errorCount: 1); + await ImportCheckAsync(request, errorCount: 2); + + Patient patient = await _client.ReadAsync(ResourceType.Patient, resourceId); + Assert.Equal(resourceId, patient.Id); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenCancelImportTask_ThenTaskShouldBeCanceled() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = etag, + Type = "Patient", + }, + }, + }; + + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + await _client.CancelImport(checkLocation); + FhirException fhirException = await Assert.ThrowsAsync(async () => await _client.CheckImportAsync(checkLocation)); + Assert.Equal(HttpStatusCode.BadRequest, fhirException.StatusCode); + } + + [Fact(Skip = "long running tests for invalid url")] + public async Task 
GivenImportOperationEnabled_WhenImportInvalidResourceUrl_ThenBadRequestShouldBeReturned() + { + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = new Uri("https://fhirtest-invalid.com"), + Type = "Patient", + }, + }, + }; + + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + + FhirException fhirException = await Assert.ThrowsAsync( + async () => + { + HttpResponseMessage response; + while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + { + await Task.Delay(TimeSpan.FromSeconds(5)); + } + }); + Assert.Equal(HttpStatusCode.BadRequest, fhirException.StatusCode); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportInvalidETag_ThenBadRequestShouldBeReturned() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Etag = "invalid", + Type = "Patient", + }, + }, + }; + + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + + FhirException fhirException = await Assert.ThrowsAsync( + async () => + { + HttpResponseMessage response; + while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == 
System.Net.HttpStatusCode.Accepted) + { + await Task.Delay(TimeSpan.FromSeconds(5)); + } + }); + Assert.Equal(HttpStatusCode.BadRequest, fhirException.StatusCode); + } + + [Fact] + public async Task GivenImportOperationEnabled_WhenImportInvalidResourceType_ThenBadRequestShouldBeReturned() + { + string patientNdJsonResource = Samples.GetNdJson("Import-Patient"); + patientNdJsonResource = Regex.Replace(patientNdJsonResource, "##PatientID##", m => Guid.NewGuid().ToString("N")); + (Uri location, string etag) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource, _fixture.CloudStorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location, + Type = "Invalid", + }, + }, + }; + + FhirException fhirException = await Assert.ThrowsAsync( + async () => await ImportTestHelper.CreateImportTaskAsync(_client, request)); + + Assert.Equal(HttpStatusCode.BadRequest, fhirException.StatusCode); + } + + private async Task ImportCheckAsync(ImportRequest request, TestFhirClient client = null, int? errorCount = null) + { + client = client ?? 
_client; + Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(client, request); + + HttpResponseMessage response; + while ((response = await client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + { + await Task.Delay(TimeSpan.FromSeconds(5)); + } + + Assert.Equal(System.Net.HttpStatusCode.OK, response.StatusCode); + ImportTaskResult result = JsonConvert.DeserializeObject(await response.Content.ReadAsStringAsync()); + Assert.NotEmpty(result.Output); + if (errorCount != null) + { + Assert.Equal(errorCount.Value, result.Error.First().Count); + } + else + { + Assert.Empty(result.Error); + } + + Assert.NotEmpty(result.Request); + + return checkLocation; + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTestFixture.cs new file mode 100644 index 0000000000..1335ebec75 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTestFixture.cs @@ -0,0 +1,76 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Client; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportTokenSearchTestFixture : ImportTestFixture + { + public ImportTokenSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + } + + public string FixtureTag { get; } = Guid.NewGuid().ToString(); + + public IReadOnlyList Observations { get; private set; } + + protected override async Task OnInitializedAsync() + { + await TestFhirClient.CreateResourcesAsync(p => + { + p.AddTestTag(FixtureTag); + }); + + Observations = await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + o => SetObservation(o, cc => cc.Coding.Add(new Coding("system1", "code1"))), + o => SetObservation(o, cc => cc.Coding.Add(new Coding("system2", "code2"))), + o => SetObservation(o, cc => cc.Text = "text"), + o => SetObservation(o, cc => cc.Coding.Add(new Coding("system1", "code2", "text2"))), + o => SetObservation(o, cc => cc.Coding.Add(new Coding("system3", "code3", "text"))), + o => SetObservation(o, cc => + { + cc.Text = "text"; + cc.Coding.Add(new Coding("system1", "code1")); + cc.Coding.Add(new Coding("system3", "code2")); + }), + o => SetObservation(o, cc => + { + cc.Coding.Add(new Coding("system2", "code1")); + cc.Coding.Add(new Coding("system3", "code3", "text2")); + }), + o => SetObservation(o, cc => cc.Coding.Add(new Coding(null, "code3"))), + o => + { + SetObservation(o, cc => { }); + o.Category = new List + { + new CodeableConcept("system", "test"), + }; + }); + + void SetObservation(Observation observation, Action codeableConceptCustomizer) + { + 
observation.AddTestTag(FixtureTag); + observation.Code = new CodeableConcept("system", "code"); + observation.Status = ObservationStatus.Registered; + + var codeableConcept = new CodeableConcept(); + + codeableConceptCustomizer(codeableConcept); + + observation.Value = codeableConcept; + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTests.cs new file mode 100644 index 0000000000..893481861a --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTokenSearchTests.cs @@ -0,0 +1,133 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportTokenSearchTests : IClassFixture<ImportTokenSearchTestFixture> + { + public static readonly object[][] TokenSearchParameterData = new[] + { + new object[] { "a" }, + new object[] { "code1", 0, 5, 6 }, + new object[] { "code3", 4, 6, 7 }, + new object[] { "a|b" }, + new object[] { "system2|code2", 1 }, + new object[] { "|code2" }, + new object[] { "|code3", 7 }, + new object[] { "a|" }, + new object[] { "system3|", 4, 5, 6 }, + new object[] { "code1,system2|code2", 0, 1, 5, 6 }, + }; + + private readonly TestFhirClient
_client; + private readonly ImportTokenSearchTestFixture _fixture; + + public ImportTokenSearchTests(ImportTokenSearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Theory] + [MemberData(nameof(TokenSearchParameterData))] + public async Task GivenATokenSearchParameter_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] expectedIndices) + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, $"value-concept={queryValue}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = expectedIndices.Select(i => _fixture.Observations[i]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + [Theory] + [InlineData("code1")] + [InlineData("text", 2, 3, 4, 5, 6)] + [InlineData("text2", 3, 6)] + public async Task GivenATokenSearchParameterWithTextModifier_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] expectedIndices) + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, $"value-concept:text={queryValue}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = expectedIndices.Select(i => _fixture.Observations[i]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + [Theory] + [MemberData(nameof(TokenSearchParameterData))] + public async Task GivenATokenSearchParameterWithNotModifier_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] excludeIndices) + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, $"value-concept:not={queryValue}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = _fixture.Observations.Where((_, i) => !excludeIndices.Contains(i)).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + [Fact] + public async Task GivenATokenSearchParameterWithNotModifier_WhenSearchedOverMissingValue_ThenCorrectBundleShouldBeReturned() + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, 
$"category:not=test&_tag={_fixture.FixtureTag}"); + + Observation[] expected = _fixture.Observations.Where((_, i) => i != 8).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + [Theory] + [InlineData("code1", 0, 5, 6, 8)] + public async Task GivenMultipleTokenSearchParametersWithNotModifiers_WhenSearched_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] excludeIndices) + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, $"category:not=test&value-concept:not={queryValue}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = _fixture.Observations.Where((_, i) => !excludeIndices.Contains(i)).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + [Theory] + [InlineData(1)] + [InlineData(2)] + public async Task GivenIdWithNotModifier_WhenSearched_ThenCorrectBundleShouldBeReturned(int count) + { + Bundle bundle = await _client.SearchAsync(ResourceType.Observation, $"_id:not={string.Join(",", _fixture.Observations.Take(count).Select(x => x.Id))}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = _fixture.Observations.Skip(count).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + + [Theory] + [InlineData(ResourceType.Patient)] + [InlineData(ResourceType.Patient, ResourceType.Organization)] + public async Task GivenTypeWithNotModifier_WhenSearched_ThenCorrectBundleShouldBeReturned(params ResourceType[] resourceTypes) + { + Bundle bundle = await _client.SearchAsync($"?_tag={_fixture.FixtureTag}&_type:not={string.Join(",", resourceTypes)}"); + + ImportTestHelper.VerifyBundle(bundle, _fixture.Observations.ToArray()); + } + + [Theory] + [MemberData(nameof(TokenSearchParameterData))] + public async Task GivenATokenSearchParameterWithNotModifier_WhenSearchedWithType_ThenCorrectBundleShouldBeReturned(string queryValue, params int[] excludeIndices) + { + Bundle bundle = await 
_client.SearchAsync($"?_type={ResourceType.Observation}&value-concept:not={queryValue}&_tag={_fixture.FixtureTag}"); + + Observation[] expected = _fixture.Observations.Where((_, i) => !excludeIndices.Contains(i)).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTestFixture.cs new file mode 100644 index 0000000000..f6eb92a5c8 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTestFixture.cs @@ -0,0 +1,45 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + public class ImportUriSearchTestFixture : ImportTestFixture + { + public ImportUriSearchTestFixture(DataStore dataStore, Format format, TestFhirServerFactory testFhirServerFactory) + : base(dataStore, format, testFhirServerFactory) + { + } + + public IReadOnlyList ValueSets { get; private set; } + + public string FixtureTag { get; set; } + + protected override async Task OnInitializedAsync() + { + FixtureTag = Guid.NewGuid().ToString(); + + ValueSets = await ImportTestHelper.ImportToServerAsync( + TestFhirClient, + CloudStorageAccount, + vs => AddValueSet(vs, "http://somewhere.com/test/system"), + vs => AddValueSet(vs, "urn://localhost/test"), + vs => AddValueSet(vs, "http://example.org/rdf#54135-9"), + vs => AddValueSet(vs, 
"http://example.org/rdf#54135-9-9")); + + void AddValueSet(ValueSet vs, string url) + { + vs.Status = PublicationStatus.Active; + vs.Url = url; + vs.AddTestTag(FixtureTag); + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTests.cs new file mode 100644 index 0000000000..bb16d02fd7 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportUriSearchTests.cs @@ -0,0 +1,52 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System.Linq; +using System.Web; +using Hl7.Fhir.Model; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; +using Microsoft.Health.Fhir.Tests.E2E.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; +using Task = System.Threading.Tasks.Task; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [Trait(Traits.Category, Categories.Import)] + [HttpIntegrationFixtureArgumentSets(DataStore.SqlServer, Format.Json)] + public class ImportUriSearchTests : IClassFixture<ImportUriSearchTestFixture> + { + private readonly TestFhirClient _client; + private readonly ImportUriSearchTestFixture _fixture; + + public ImportUriSearchTests(ImportUriSearchTestFixture fixture) + { + _client = fixture.TestFhirClient; + _fixture = fixture; + } + + [Theory] + [InlineData("", "http://somewhere.com/test/system", 0)] + [InlineData("", "http://somewhere.COM/test/system")] + [InlineData("", "http://example.org/rdf#54135-9", 2)] + [InlineData("", "urn://localhost/test", 1)] + [InlineData(":above", "http://somewhere.com/test/system/123", 0)] + [InlineData(":above",
"test")] + [InlineData(":above", "urn://localhost/test")] + [InlineData(":above", "http://example.org/rdf#54135-9-9-10", 2, 3)] + [InlineData(":below", "http", 0, 2, 3)] + [InlineData(":below", "test")] + [InlineData(":below", "urn")] + public async Task GivenAUriSearchParam_WhenSearched_ThenCorrectBundleShouldBeReturned(string modifier, string queryValue, params int[] expectedIndices) + { + Bundle bundle = await _client.SearchAsync(ResourceType.ValueSet, $"url{modifier}={HttpUtility.UrlEncode(queryValue)}&_tag={_fixture.FixtureTag}"); + + ValueSet[] expected = expectedIndices.Select(i => _fixture.ValueSets[i]).ToArray(); + + ImportTestHelper.VerifyBundle(bundle, expected); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/StartupForImportTestProvider.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/StartupForImportTestProvider.cs new file mode 100644 index 0000000000..1d5acaadf3 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/StartupForImportTestProvider.cs @@ -0,0 +1,26 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Health.Fhir.Shared.Tests.E2E; +using Microsoft.Health.Fhir.Shared.Tests.E2E.Rest; + +namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Import +{ + [RequiresIsolatedDatabase] + public class StartupForImportTestProvider : StartupBaseForCustomProviders + { + public StartupForImportTestProvider(IConfiguration configuration) + : base(configuration) + { + } + + public override void ConfigureServices(IServiceCollection services) + { + base.ConfigureServices(services); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs index a26dcb5e13..ea775a05c2 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs @@ -59,6 +59,10 @@ public InProcTestFhirServer(DataStore dataStore, Type startupType) // enable reindex for testing configuration["FhirServer:Operations:Reindex:Enabled"] = "true"; + // enable import for testing + configuration["FhirServer:Operations:Import:Enabled"] = "true"; + configuration["FhirServer:Operations:IntegrationDataStore:StorageAccountConnection"] = "UseDevelopmentStorage=true"; + if (startupType.IsDefined(typeof(RequiresIsolatedDatabaseAttribute))) { // Alter the configuration so that the server will create a new, isolated database/container. 
@@ -69,6 +73,8 @@ public InProcTestFhirServer(DataStore dataStore, Type startupType) var connectionStringBuilder = new SqlConnectionStringBuilder(configuration["SqlServer:ConnectionString"]); var databaseName = connectionStringBuilder.InitialCatalog += "_" + startupType.Name; configuration["SqlServer:ConnectionString"] = connectionStringBuilder.ToString(); + configuration["TaskHosting:Enabled"] = "true"; + configuration["TaskHosting:MaxRunningTaskCount"] = "2"; _cleanupDatabase = async () => { diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/DataGeneratorsTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/DataGeneratorsTests.cs new file mode 100644 index 0000000000..9f76b542e4 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/DataGeneratorsTests.cs @@ -0,0 +1,150 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Reflection; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.SqlServer.Features.Schema.Model; +using Xunit; + +namespace Microsoft.Health.Fhir.Shared.Tests.Integration.Features.Operations.Import +{ + public class DataGeneratorsTests + { + [Fact] + public void GivenDateTimeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateDateTimeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.DateTimeSearchParam, table); + } + + [Fact] + public void GivenNumberSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateNumberSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.NumberSearchParam, table); + } + + [Fact] + public void GivenQuantitySearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateQuantitySearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.QuantitySearchParam, table); + } + + [Fact] + public void GivenReferenceSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateReferenceSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.ReferenceSearchParam, table); + } + + [Fact] + public void GivenReferenceTokenCompositeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateReferenceTokenCompositeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.ReferenceTokenCompositeSearchParam, table); + } + + [Fact] + public void 
GivenStringSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateStringSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.StringSearchParam, table); + } + + [Fact] + public void GivenTokenDateTimeCompositeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenDateTimeCompositeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenDateTimeCompositeSearchParam, table); + } + + [Fact] + public void GivenTokenNumberNumberCompositeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenNumberNumberCompositeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenNumberNumberCompositeSearchParam, table); + } + + [Fact] + public void GivenTokenQuantityCompositeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenQuantityCompositeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenQuantityCompositeSearchParam, table); + } + + [Fact] + public void GivenTokenSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenSearchParam, table); + } + + [Fact] + public void GivenTokenStringCompositeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenStringCompositeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenStringCompositeSearchParam, table); + } + + [Fact] + public void GivenTokenTextSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenTextSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenText, table); + 
} + + [Fact] + public void GivenTokenTokenCompositeSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateTokenTokenCompositeSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.TokenTokenCompositeSearchParam, table); + } + + [Fact] + public void GivenUriSearchParamsRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateUriSearchParamsTable(1, 1000, 103); + ValidataDataTable(VLatest.UriSearchParam, table); + } + + [Fact] + public void GivenCompartmentAssignmentRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateCompartmentAssignmentTable(1, 1000, 103); + ValidataDataTable(VLatest.CompartmentAssignment, table); + } + + [Fact] + public void GivenResourceWriteClaimRecords_WhenGeneratorData_ThenValidDataTableShouldBeReturned() + { + DataTable table = TestBulkDataProvider.GenerateResourceWriteClaimTable(1, 1000, 103); + ValidataDataTable(VLatest.ResourceWriteClaim, table); + } + + private void ValidataDataTable<T>(T tableDefination, DataTable dataTable) + { + Dictionary<string, string> realColumnRecords = new Dictionary<string, string>(); + foreach (DataColumn c in dataTable.Columns) + { + realColumnRecords[c.ColumnName] = c.DataType.ToString(); + } + + var columnFields = tableDefination.GetType().GetFields(BindingFlags.Instance | BindingFlags.NonPublic).Where(f => f.FieldType.IsAssignableTo(typeof(Column))).ToArray(); + Assert.Equal(columnFields.Length, realColumnRecords.Count); + Assert.Equal(columnFields.Length, dataTable.Rows[0].ItemArray.Length); + + foreach (FieldInfo field in columnFields) + { + Column column = (Column)field.GetValue(tableDefination); + Assert.Equal(realColumnRecords[column.Metadata.Name], column.Metadata.SqlDbType.GetGeneralType().ToString()); + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlBulkImporterTests.cs
b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlBulkImporterTests.cs new file mode 100644 index 0000000000..c1fd527631 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlBulkImporterTests.cs @@ -0,0 +1,409 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Operations.Import; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator; +using NSubstitute; +using Xunit; + +namespace Microsoft.Health.Fhir.Shared.Tests.Integration.Features.Operations.Import +{ + public class SqlBulkImporterTests + { + [Fact] + public async Task GivenSqlBulkImporter_WhenImportData_ThenAllDataShouldBeImported() + { + long expectedSucceedCount = 4321; + long expectedFailedCount = 0; + long startIndex = 0; + int maxResourceCountInBatch = 123; + int checkpointBatchCount = 345; + int maxConcurrentCount = 5; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataWithError_ThenAllDataAndErrorShouldBeImported() + { + long 
expectedSucceedCount = 2000; + long expectedFailedCount = 123; + long startIndex = 0; + int maxResourceCountInBatch = 123; + int checkpointBatchCount = 345; + int maxConcurrentCount = 5; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataWithAllFailed_ThenAllErrorShouldBeImported() + { + long expectedSucceedCount = 0; + long expectedFailedCount = 1234; + long startIndex = 0; + int maxResourceCountInBatch = 123; + int checkpointBatchCount = 345; + int maxConcurrentCount = 5; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataEqualsBatchCount_ThenAllDataAndErrorShouldBeImported() + { + long expectedSucceedCount = 10; + long expectedFailedCount = 1; + long startIndex = 0; + int maxResourceCountInBatch = 11; + int checkpointBatchCount = 11; + int maxConcurrentCount = 5; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataLessThanBatchCount_ThenAllDataAndErrorShouldBeImported() + { + long expectedSucceedCount = 10; + long expectedFailedCount = 1; + long startIndex = 0; + int maxResourceCountInBatch = 100; + int checkpointBatchCount = 100; + int maxConcurrentCount = 5; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataFromMiddle_ThenAllDataAndErrorShouldBeImported() + { + long expectedSucceedCount = 10; + long expectedFailedCount = 1; + long 
startIndex = 10; + int maxResourceCountInBatch = 100; + int checkpointBatchCount = 100; + int maxConcurrentCount = 5; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportData_ThenProgressUpdateShouldInSequence() + { + long expectedSucceedCount = 1000; + long expectedFailedCount = 100; + long startIndex = 10; + int maxResourceCountInBatch = 10; + int checkpointBatchCount = 1; + int maxConcurrentCount = 10; + + await VerifyBulkImporterBehaviourAsync(expectedSucceedCount, expectedFailedCount, startIndex, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataWithUnExceptedExceptionInBulkOpertation_ThenChannelShouldBeCompleteAndExceptionShouldThrow() + { + Channel inputs = Channel.CreateUnbounded(); + await inputs.Writer.WriteAsync(new ImportResource(0, 0, default(ResourceWrapper))); + inputs.Writer.Complete(); + + ISqlImportOperation testFhirDataBulkOperation = Substitute.For(); + testFhirDataBulkOperation + .BulkCopyDataAsync(Arg.Any(), Arg.Any()) + .Returns((callInfo) => + { + throw new InvalidOperationException(); + }); + testFhirDataBulkOperation + .BulkMergeResourceAsync(Arg.Any>(), Arg.Any()) + .Returns(call => + { + IEnumerable resources = (IEnumerable)call[0]; + + return resources; + }); + + IImportErrorSerializer errorSerializer = Substitute.For(); + ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For(); + dataWrapperFactory.CreateSqlBulkCopyDataWrapper(Arg.Any()) + .Returns((callInfo) => + { + ImportResource resource = (ImportResource)callInfo[0]; + return new SqlBulkCopyDataWrapper() + { + ResourceSurrogateId = resource.Id, + }; + }); + + List generators = new List() + { + new TestDataGenerator("Table1", 1), + new TestDataGenerator("Table2", 2), + }; + + IOptions 
operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger.Instance); + + List errorLogs = new List(); + IImportErrorStore importErrorStore = Substitute.For(); + (Channel progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None); + + await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync()) + { + // Do nothing... + } + + await Assert.ThrowsAsync(() => importTask); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataWithUnExceptedExceptionInErrorLogUpload_ThenChannelShouldBeCompleteAndExceptionShouldThrow() + { + Channel inputs = Channel.CreateUnbounded(); + await inputs.Writer.WriteAsync(new ImportResource(0, 0, "Error message")); + inputs.Writer.Complete(); + + ISqlImportOperation testFhirDataBulkOperation = Substitute.For(); + ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For(); + IImportErrorSerializer errorSerializer = Substitute.For(); + List generators = new List(); + + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger.Instance); + + List errorLogs = new List(); + IImportErrorStore importErrorStore = Substitute.For(); + importErrorStore.UploadErrorsAsync(Arg.Any(), Arg.Any()) + .Returns((_) => throw new InvalidOperationException()); + + (Channel progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None); + + await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync()) + { + // Do nothing... 
+ } + + await Assert.ThrowsAsync(() => importTask); + } + + [Fact] + public async Task GivenSqlBulkImporter_WhenImportDataWithUnExceptedExceptionInProcessResource_ThenChannelShouldBeCompleteAndExceptionShouldThrow() + { + Channel inputs = Channel.CreateUnbounded(); + await inputs.Writer.WriteAsync(new ImportResource(0, 0, default(ResourceWrapper))); + inputs.Writer.Complete(); + + ISqlImportOperation testFhirDataBulkOperation = Substitute.For(); + IImportErrorSerializer errorSerializer = Substitute.For(); + ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For(); + dataWrapperFactory.CreateSqlBulkCopyDataWrapper(Arg.Any()) + .Returns((callInfo) => + { + throw new InvalidOperationException(); + }); + List generators = new List(); + + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger.Instance); + + List errorLogs = new List(); + IImportErrorStore importErrorStore = Substitute.For(); + + (Channel progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None); + + await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync()) + { + // Do nothing... 
+ } + + await Assert.ThrowsAsync(() => importTask); + } + + private static async Task VerifyBulkImporterBehaviourAsync(long expectedSucceedCount, long expectedFailedCount, long startIndex, int maxResourceCountInBatch, int checkpointBatchCount, int maxConcurrentCount) + { + Channel inputs = Channel.CreateUnbounded(); + _ = Task.Run(async () => + { + long totalCount = expectedSucceedCount + expectedFailedCount; + bool[] resourceFailedRecords = new bool[totalCount]; + for (long i = 0; i < expectedFailedCount; ++i) + { + resourceFailedRecords[i] = true; + } + + resourceFailedRecords = resourceFailedRecords.OrderBy(_ => Guid.NewGuid()).ToArray(); + for (long i = 0; i < totalCount; ++i) + { + if (resourceFailedRecords[i]) + { + await inputs.Writer.WriteAsync(new ImportResource(i, i + startIndex, "Error message")); + } + else + { + await inputs.Writer.WriteAsync(new ImportResource(i, i + startIndex, CreateResourceWrapper())); + } + } + + inputs.Writer.Complete(); + }); + + await VerifyBulkImporterBehaviourAsync(inputs, expectedSucceedCount, expectedFailedCount, startIndex + expectedSucceedCount + expectedFailedCount, maxResourceCountInBatch, checkpointBatchCount, maxConcurrentCount); + } + + private static async Task VerifyBulkImporterBehaviourAsync(Channel inputs, long expectedSucceedCount, long expectedFailedCount, long expectedEndIndex, int maxResourceCountInBatch, int checkpointBatchCount, int maxConcurrentCount) + { + DataTable table1 = new DataTable(); + DataTable table2 = new DataTable(); + List importedResources = new List(); + + ISqlImportOperation testFhirDataBulkOperation = Substitute.For(); + testFhirDataBulkOperation + .When(t => t.BulkCopyDataAsync(Arg.Any(), Arg.Any())) + .Do(call => + { + DataTable table = (DataTable)call[0]; + if (table.TableName.Equals("Table1")) + { + table1.Merge(table); + } + else + { + table2.Merge(table); + } + }); + testFhirDataBulkOperation + .BulkMergeResourceAsync(Arg.Any>(), Arg.Any()) + .Returns(call => + { + IEnumerable 
resources = (IEnumerable)call[0]; + importedResources.AddRange(resources); + + return resources; + }); + + IImportErrorSerializer errorSerializer = Substitute.For(); + ISqlBulkCopyDataWrapperFactory dataWrapperFactory = Substitute.For(); + dataWrapperFactory.CreateSqlBulkCopyDataWrapper(Arg.Any()) + .Returns((callInfo) => + { + ImportResource resource = (ImportResource)callInfo[0]; + return new SqlBulkCopyDataWrapper() + { + ResourceSurrogateId = resource.Id, + }; + }); + + List generators = new List() + { + new TestDataGenerator("Table1", 1), + new TestDataGenerator("Table2", 2), + }; + + IOptions operationsConfiguration = Substitute.For>(); + OperationsConfiguration operationsConfig = new OperationsConfiguration(); + operationsConfig.Import.SqlBatchSizeForImportResourceOperation = maxResourceCountInBatch; + operationsConfig.Import.SqlMaxImportOperationConcurrentCount = maxConcurrentCount; + operationsConfig.Import.SqlImportBatchSizeForCheckpoint = checkpointBatchCount; + operationsConfiguration.Value.Returns(operationsConfig); + + SqlResourceBulkImporter importer = new SqlResourceBulkImporter(testFhirDataBulkOperation, dataWrapperFactory, errorSerializer, generators, operationsConfiguration, NullLogger.Instance); + + List errorLogs = new List(); + IImportErrorStore importErrorStore = Substitute.For(); + importErrorStore.When(t => t.UploadErrorsAsync(Arg.Any(), Arg.Any())) + .Do(call => + { + string[] errors = (string[])call[0]; + errorLogs.AddRange(errors); + }); + (Channel progressChannel, Task importTask) = importer.Import(inputs, importErrorStore, CancellationToken.None); + ImportProcessingProgress finalProgress = new ImportProcessingProgress(); + await foreach (ImportProcessingProgress progress in progressChannel.Reader.ReadAllAsync()) + { + Assert.True(finalProgress.CurrentIndex <= progress.CurrentIndex); + finalProgress = progress; + } + + await importTask; + + Assert.Equal(expectedSucceedCount, finalProgress.SucceedImportCount); + 
Assert.Equal(expectedFailedCount, finalProgress.FailedImportCount); + Assert.Equal(expectedEndIndex, finalProgress.CurrentIndex); + + Assert.Equal(expectedSucceedCount, importedResources.Count); + Assert.Equal(expectedSucceedCount, table1.Rows.Count); + Assert.Equal(expectedSucceedCount * 2, table2.Rows.Count); + Assert.Equal(expectedFailedCount, errorLogs.Count); + } + + private static ResourceWrapper CreateResourceWrapper() + { + return new ResourceWrapper( + Guid.NewGuid().ToString(), + "0", + "Dummy", + new RawResource("Dummy", Fhir.Core.Models.FhirResourceFormat.Json, true), + new ResourceRequest("POST"), + DateTimeOffset.UtcNow, + false, + null, + null, + null, + "SearchParam"); + } + + private class TestDataGenerator : TableBulkCopyDataGenerator + { + private string _tableName; + private int _subResourceCount; + + public TestDataGenerator(string tableName, int subResourceCount = 1) + { + _tableName = tableName; + _subResourceCount = subResourceCount; + } + + internal override string TableName => _tableName; + + internal override void FillDataTable(DataTable table, SqlBulkCopyDataWrapper input) + { + for (int i = 0; i < _subResourceCount; ++i) + { + DataRow newRow = table.NewRow(); + + FillColumn(newRow, "ResourceSurrogateId", input.ResourceSurrogateId); + FillColumn(newRow, "Id", Guid.NewGuid().ToString("N")); + + table.Rows.Add(newRow); + } + } + + internal override void FillSchema(DataTable table) + { + table.Columns.Add(new DataColumn("ResourceSurrogateId", typeof(long))); + table.Columns.Add(new DataColumn("Id", typeof(string))); + } + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlServerFhirDataBulkOperationTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlServerFhirDataBulkOperationTests.cs new file mode 100644 index 0000000000..7d3a13a35b --- /dev/null +++ 
b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/SqlServerFhirDataBulkOperationTests.cs @@ -0,0 +1,430 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. +// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Data; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Features.Persistence; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; +using Microsoft.Health.Fhir.SqlServer.Features.Storage; +using Microsoft.Health.Fhir.Tests.Integration.Persistence; +using Microsoft.Health.SqlServer.Features.Client; +using NSubstitute; +using Xunit; + +namespace Microsoft.Health.Fhir.Shared.Tests.Integration.Features.Operations.Import +{ + public class SqlServerFhirDataBulkOperationTests : IClassFixture + { + private SqlServerFhirStorageTestsFixture _fixture; + + public SqlServerFhirDataBulkOperationTests(SqlServerFhirStorageTestsFixture fixture) + { + _fixture = fixture; + } + + [Fact] + public async Task GivenBatchResources_WhenBulkCopy_ThenRecordsShouldBeAdded() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, 
operationsConfiguration, NullLogger.Instance); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + int count = 1001; + short typeId = _fixture.SqlServerFhirModel.GetResourceTypeId("Patient"); + + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateDateTimeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateNumberSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateQuantitySearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateReferenceSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateReferenceTokenCompositeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateStringSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenDateTimeCompositeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenNumberNumberCompositeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenQuantityCompositeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, 
TestBulkDataProvider.GenerateTokenStringCompositeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenTextSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenTokenCompositeSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateUriSearchParamsTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateCompartmentAssignmentTable); + await VerifyDataForBulkImport(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateResourceWriteClaimTable); + } + + [Fact] + public async Task GivenImportedBatchResources_WhenCleanData_ThenRecordsShouldBeDeleted() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + int count = 1001; + short typeId = _fixture.SqlServerFhirModel.GetResourceTypeId("Patient"); + + List tableNames = new List(); + + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateResourceTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateDateTimeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, 
TestBulkDataProvider.GenerateNumberSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateQuantitySearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateReferenceSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateReferenceTokenCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateStringSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenDateTimeCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenNumberNumberCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenQuantityCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenStringCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenTextSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenTokenCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, 
startSurrogateId, count, typeId, TestBulkDataProvider.GenerateUriSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateCompartmentAssignmentTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateResourceWriteClaimTable)); + + await sqlServerFhirDataBulkOperation.CleanBatchResourceAsync("Patient", startSurrogateId, startSurrogateId + count - 1, CancellationToken.None); + + foreach (string tableName in tableNames) + { + int rCount = await GetResourceCountAsync(tableName, startSurrogateId, startSurrogateId + count); + Assert.Equal(1, rCount); + } + } + + [Fact] + public async Task GivenImportedBatchResources_WhenCleanDataWithWrongType_ThenRecordsShouldNotBeDeleted() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + int count = 1001; + short typeId = _fixture.SqlServerFhirModel.GetResourceTypeId("Patient"); + + List tableNames = new List(); + + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateResourceTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateDateTimeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateNumberSearchParamsTable)); + tableNames.Add(await 
ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateQuantitySearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateReferenceSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateReferenceTokenCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateStringSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenDateTimeCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenNumberNumberCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenQuantityCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenStringCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenTextSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateTokenTokenCompositeSearchParamsTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateUriSearchParamsTable)); + 
tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateCompartmentAssignmentTable)); + tableNames.Add(await ImportDataAsync(sqlServerFhirDataBulkOperation, startSurrogateId, count, typeId, TestBulkDataProvider.GenerateResourceWriteClaimTable)); + + await sqlServerFhirDataBulkOperation.CleanBatchResourceAsync("Observation", startSurrogateId, startSurrogateId + count - 1, CancellationToken.None); + + foreach (string tableName in tableNames) + { + if (VLatest.ResourceWriteClaim.TableName.Equals(tableName)) + { + // ResourceWriteClaim do not have resource type. + continue; + } + + int rCount = await GetResourceCountAsync(tableName, startSurrogateId, startSurrogateId + count); + Assert.Equal(count, rCount); + } + } + + [Fact] + public async Task GivenDuplicateResources_WhenBulkMergeToStore_ThenOnlyDistinctResourcesImported() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + int count = 100; + string resourceId = Guid.NewGuid().ToString(); + + List resources = new List(); + for (int i = 0; i < count; ++i) + { + resources.Add(CreateTestResource(resourceId, startSurrogateId + i)); + } + + SqlBulkCopyDataWrapper[] result = (await sqlServerFhirDataBulkOperation.BulkMergeResourceAsync(resources, CancellationToken.None)).ToArray(); + int rCount = await GetResourceCountAsync("Resource", startSurrogateId, startSurrogateId + count); + Assert.Single(result); + Assert.Equal(1, rCount); + } + + [Fact] + public async Task 
GivenBatchInValidResources_WhenBulkCopy_ThenExceptionShouldBeThrow() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + int count = 1001; + + DataTable inputTable = TestBulkDataProvider.GenerateInValidUriSearchParamsTable(count, startSurrogateId, 0); + await Assert.ThrowsAnyAsync(async () => await sqlServerFhirDataBulkOperation.BulkCopyDataAsync(inputTable, CancellationToken.None)); + } + + [Fact] + public async Task GivenListOfResources_WhenBulkMergeToStore_ThenAllResourcesShouldBeImported() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + List resources = new List(); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + + SqlBulkCopyDataWrapper resource1 = CreateTestResource(Guid.NewGuid().ToString(), startSurrogateId); + SqlBulkCopyDataWrapper resource2 = CreateTestResource(Guid.NewGuid().ToString(), startSurrogateId + 1); + + resources.Add(resource1); + resources.Add(resource2); + + SqlBulkCopyDataWrapper[] result = (await sqlServerFhirDataBulkOperation.BulkMergeResourceAsync(resources, CancellationToken.None)).ToArray(); + int rCount = await GetResourceCountAsync("Resource", startSurrogateId, startSurrogateId + 2); + Assert.Equal(2, result.Length); + Assert.Equal(2, 
rCount); + } + + [Fact] + public async Task GivenListOfResourcesWithDupResourceId_WhenBulkMergeToStore_ThenDistinctResourceShouldBeImported() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + List resources = new List(); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + + SqlBulkCopyDataWrapper resource1 = CreateTestResource(Guid.NewGuid().ToString(), startSurrogateId); + SqlBulkCopyDataWrapper resource2 = CreateTestResource(Guid.NewGuid().ToString(), startSurrogateId + 1); + + resources.Add(resource1); + resources.Add(resource2); + + SqlBulkCopyDataWrapper[] result = (await sqlServerFhirDataBulkOperation.BulkMergeResourceAsync(resources, CancellationToken.None)).ToArray(); + int rCount = await GetResourceCountAsync("Resource", startSurrogateId, startSurrogateId + 2); + Assert.Equal(2, result.Count()); + Assert.Equal(2, rCount); + + resource1.ResourceSurrogateId = startSurrogateId + 2; + resource1.ResourceSurrogateId = startSurrogateId + 3; + result = (await sqlServerFhirDataBulkOperation.BulkMergeResourceAsync(resources, CancellationToken.None)).ToArray(); + rCount = await GetResourceCountAsync("Resource", startSurrogateId, startSurrogateId + 4); + Assert.Empty(result); + Assert.Equal(2, rCount); + } + + [Fact] + public async Task GivenListOfResources_WhenBulkMergeToStoreTwice_ThenSecondMergeShouldFail() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new 
TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + List resources = new List(); + long startSurrogateId = ResourceSurrogateIdHelper.LastUpdatedToResourceSurrogateId(DateTime.Now); + + string resourceId = Guid.NewGuid().ToString(); + SqlBulkCopyDataWrapper resource1 = CreateTestResource(resourceId, startSurrogateId); + SqlBulkCopyDataWrapper resource2 = CreateTestResource(resourceId, startSurrogateId + 1); + + resources.Add(resource1); + resources.Add(resource2); + + SqlBulkCopyDataWrapper[] result = (await sqlServerFhirDataBulkOperation.BulkMergeResourceAsync(resources, CancellationToken.None)).ToArray(); + int rCount = await GetResourceCountAsync("Resource", startSurrogateId, startSurrogateId + 2); + Assert.Single(result); + Assert.Equal(1, rCount); + } + + [Fact] + public async Task GivenUnclusteredIndexes_WhenRebuildIndexes_ThenOnlyDisabledIndexShouldBeBuilt() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + + (string tableName, string indexName)[] indexes = SqlImportOperation.OptionalIndexesForImport.Select(indexRecord => (indexRecord.table.TableName, indexRecord.index.IndexName)).ToArray(); + foreach (var index in indexes) + { + await DisableIndex(index.tableName, index.indexName); + bool isDisabled = await GetIndexDisableStatus(index.indexName); + Assert.True(isDisabled); + + bool isExecuted = await RebuildIndex(index.tableName, index.indexName); + isDisabled = await GetIndexDisableStatus(index.indexName); + Assert.False(isDisabled); + Assert.True(isExecuted); + + isExecuted = await RebuildIndex(index.tableName, index.indexName); + isDisabled = await 
GetIndexDisableStatus(index.indexName); + Assert.False(isDisabled); + Assert.False(isExecuted); + } + } + + [Fact] + public async Task GivenUnclusteredIndexes_WhenDisableIndexes_ThenOnlyBuiltIndexShouldBeDisabled() + { + IOptions operationsConfiguration = Substitute.For>(); + operationsConfiguration.Value.Returns(new OperationsConfiguration()); + + SqlImportOperation sqlServerFhirDataBulkOperation = new SqlImportOperation(_fixture.SqlConnectionWrapperFactory, new TestSqlServerTransientFaultRetryPolicyFactory(), _fixture.SqlServerFhirModel, operationsConfiguration, NullLogger.Instance); + + (string tableName, string indexName)[] indexes = SqlImportOperation.OptionalIndexesForImport.Select(indexRecord => (indexRecord.table.TableName, indexRecord.index.IndexName)).ToArray(); + foreach (var index in indexes) + { + await RebuildIndex(index.tableName, index.indexName); + bool isDisabled = await GetIndexDisableStatus(index.indexName); + Assert.False(isDisabled); + + bool isExecuted = await DisableIndex(index.tableName, index.indexName); + isDisabled = await GetIndexDisableStatus(index.indexName); + Assert.True(isDisabled); + Assert.True(isExecuted); + + isExecuted = await DisableIndex(index.tableName, index.indexName); + isDisabled = await GetIndexDisableStatus(index.indexName); + Assert.True(isDisabled); + Assert.False(isExecuted); + } + } + + private static SqlBulkCopyDataWrapper CreateTestResource(string resourceId, long surrogateId) + { + SqlBulkCopyDataWrapper resource = new SqlBulkCopyDataWrapper(); + resource.Resource = + new ResourceWrapper( + resourceId, + "0", + "Dummy", + new RawResource("Test", Fhir.Core.Models.FhirResourceFormat.Json, true), + new ResourceRequest("PUT"), + DateTimeOffset.UtcNow, + false, + null, + null, + null, + "SearchParam"); + resource.ResourceSurrogateId = surrogateId; + resource.ResourceTypeId = 0; + resource.BulkImportResource = new BulkImportResourceTypeV1Row(0, resourceId, 0, false, surrogateId, false, "POST", new 
MemoryStream(Encoding.UTF8.GetBytes("Test")), true, "Test"); + return resource; + } + + private async Task GetIndexDisableStatus(string indexName) + { + SqlConnectionWrapperFactory factory = _fixture.SqlConnectionWrapperFactory; + using (SqlConnectionWrapper connection = await factory.ObtainSqlConnectionWrapperAsync(CancellationToken.None)) + using (SqlCommandWrapper command = connection.CreateSqlCommand()) + { + command.CommandText = $"select is_disabled from sys.indexes where name = '{indexName}'"; + + return (bool)(await command.ExecuteScalarAsync(CancellationToken.None)); + } + } + + private async Task RebuildIndex(string tableName, string indexName) + { + SqlConnectionWrapperFactory factory = _fixture.SqlConnectionWrapperFactory; + using (SqlConnectionWrapper sqlConnectionWrapper = await factory.ObtainSqlConnectionWrapperAsync(CancellationToken.None)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + VLatest.RebuildIndex.PopulateCommand(sqlCommandWrapper, tableName, indexName); + var returnParameter = sqlCommandWrapper.Parameters.Add("@ReturnVal", SqlDbType.Int); + returnParameter.Direction = ParameterDirection.ReturnValue; + + await sqlCommandWrapper.ExecuteNonQueryAsync(CancellationToken.None); + bool isExecuted = Convert.ToBoolean(returnParameter.Value); + + return isExecuted; + } + } + + private async Task DisableIndex(string tableName, string indexName) + { + SqlConnectionWrapperFactory factory = _fixture.SqlConnectionWrapperFactory; + using (SqlConnectionWrapper sqlConnectionWrapper = await factory.ObtainSqlConnectionWrapperAsync(CancellationToken.None)) + using (SqlCommandWrapper sqlCommandWrapper = sqlConnectionWrapper.CreateSqlCommand()) + { + VLatest.DisableIndex.PopulateCommand(sqlCommandWrapper, tableName, indexName); + var returnParameter = sqlCommandWrapper.Parameters.Add("@ReturnVal", SqlDbType.Int); + returnParameter.Direction = ParameterDirection.ReturnValue; + + await 
sqlCommandWrapper.ExecuteNonQueryAsync(CancellationToken.None); + bool isExecuted = Convert.ToBoolean(returnParameter.Value); + + return isExecuted; + } + } + + private async Task VerifyDataForBulkImport(SqlImportOperation sqlServerFhirDataBulkOperation, long startSurrogateId, int count, short resourceTypeId, Func tableGenerator, string resourceId = null) + { + DataTable inputTable = tableGenerator(count, startSurrogateId, resourceTypeId, resourceId); + await sqlServerFhirDataBulkOperation.BulkCopyDataAsync(inputTable, CancellationToken.None); + await CheckTableDataAsync(inputTable, startSurrogateId, startSurrogateId + count); + } + + private async Task ImportDataAsync(SqlImportOperation sqlServerFhirDataBulkOperation, long startSurrogateId, int count, short resourceTypeId, Func tableGenerator, string resourceId = null) + { + DataTable inputTable = tableGenerator(count, startSurrogateId, resourceTypeId, resourceId); + await sqlServerFhirDataBulkOperation.BulkCopyDataAsync(inputTable, CancellationToken.None); + + return inputTable.TableName; + } + + private async Task GetResourceCountAsync(string tableName, long startSurrogateId, long endSurrogateId) + { + SqlConnectionWrapperFactory factory = _fixture.SqlConnectionWrapperFactory; + using SqlConnectionWrapper connection = await factory.ObtainSqlConnectionWrapperAsync(CancellationToken.None); + using SqlCommandWrapper command = connection.CreateSqlCommand(); + command.CommandText = $"select count(*) from {tableName} where ResourceSurrogateId >= {startSurrogateId} and ResourceSurrogateId < {endSurrogateId}"; + + return (int)(await command.ExecuteScalarAsync(CancellationToken.None)); + } + + private async Task CheckTableDataAsync(DataTable table, long startSurrogateId, long endSurrogateId) + { + SqlConnectionWrapperFactory factory = _fixture.SqlConnectionWrapperFactory; + using SqlConnectionWrapper connection = await factory.ObtainSqlConnectionWrapperAsync(CancellationToken.None); + using SqlDataAdapter adapter = new 
SqlDataAdapter(); + + DataColumn[] columns = new DataColumn[table.Columns.Count]; + table.Columns.CopyTo(columns, 0); + string columnsString = string.Join(',', columns.Select(c => c.ColumnName)); + string queryText = $"select {columnsString} from {table.TableName} where ResourceSurrogateId >= {startSurrogateId} and ResourceSurrogateId < {endSurrogateId}"; + adapter.SelectCommand = new SqlCommand(queryText, connection.SqlConnection); + + DataSet result = new DataSet(); + adapter.Fill(result); + + Assert.Equal(columns.Length, result.Tables[0].Columns.Count); + Assert.Equal(table.Rows.Count, result.Tables[0].Rows.Count); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestBulkDataProvider.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestBulkDataProvider.cs new file mode 100644 index 0000000000..3eab3a88ac --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestBulkDataProvider.cs @@ -0,0 +1,267 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Data; +using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import.DataGenerator; +using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; + +namespace Microsoft.Health.Fhir.Shared.Tests.Integration.Features.Operations.Import +{ + public static class TestBulkDataProvider + { + public static DataTable GenerateResourceTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + ResourceTableBulkCopyDataGenerator generator = new ResourceTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + ResourceTableBulkCopyDataGenerator.FillDataTable(result, resoureType, (resourceId ?? Guid.NewGuid().ToString()) + i.ToString(), startSurrogatedId + i, new byte[10], string.Empty); + } + + return result; + } + + public static DataTable GenerateDateTimeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + DateTimeSearchParamsTableBulkCopyDataGenerator generator = new DateTimeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + DateTimeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkDateTimeSearchParamTableTypeV1Row(0, 0, default(DateTimeOffset), default(DateTimeOffset), true)); + } + + return result; + } + + public static DataTable GenerateNumberSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + NumberSearchParamsTableBulkCopyDataGenerator generator = new NumberSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + NumberSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new 
BulkNumberSearchParamTableTypeV1Row(0, 0, 1, 1, 1)); + } + + return result; + } + + public static DataTable GenerateQuantitySearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + QuantitySearchParamsTableBulkCopyDataGenerator generator = new QuantitySearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + QuantitySearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkQuantitySearchParamTableTypeV1Row(0, 0, 1, 1, 1, 1, 1)); + } + + return result; + } + + public static DataTable GenerateReferenceSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + ReferenceSearchParamsTableBulkCopyDataGenerator generator = new ReferenceSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + ReferenceSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkReferenceSearchParamTableTypeV1Row(0, 0, string.Empty, 1, string.Empty, 1)); + } + + return result; + } + + public static DataTable GenerateReferenceTokenCompositeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator generator = new ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + ReferenceTokenCompositeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkReferenceTokenCompositeSearchParamTableTypeV1Row(0, 0, string.Empty, 1, string.Empty, 1, 1, string.Empty)); + } + + return result; + } + + public static DataTable GenerateStringSearchParamsTable(int count, long startSurrogatedId, short resoureType, string 
resourceId = null) + { + StringSearchParamsTableBulkCopyDataGenerator generator = new StringSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + StringSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkStringSearchParamTableTypeV1Row(0, 0, string.Empty, string.Empty)); + } + + return result; + } + + public static DataTable GenerateTokenDateTimeCompositeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator generator = new TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + TokenDateTimeCompositeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenDateTimeCompositeSearchParamTableTypeV1Row(0, 0, 1, string.Empty, default(DateTimeOffset), default(DateTimeOffset), true)); + } + + return result; + } + + public static DataTable GenerateTokenNumberNumberCompositeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator generator = new TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + TokenNumberNumberCompositeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row(0, 0, 1, string.Empty, 0, 0, 0, 0, 0, 0, true)); + } + + return result; + } + + public static DataTable GenerateTokenQuantityCompositeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + 
TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator generator = new TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + TokenQuantityCompositeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenQuantityCompositeSearchParamTableTypeV1Row(0, 0, 0, string.Empty, 0, 0, 0, 0, 0)); + } + + return result; + } + + public static DataTable GenerateTokenSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + TokenSearchParamsTableBulkCopyDataGenerator generator = new TokenSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + TokenSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenSearchParamTableTypeV1Row(0, 0, 0, string.Empty)); + } + + return result; + } + + public static DataTable GenerateTokenStringCompositeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + TokenStringCompositeSearchParamsTableBulkCopyDataGenerator generator = new TokenStringCompositeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + TokenStringCompositeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenStringCompositeSearchParamTableTypeV1Row(0, 0, 0, string.Empty, string.Empty, string.Empty)); + } + + return result; + } + + public static DataTable GenerateTokenTextSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + TokenTextSearchParamsTableBulkCopyDataGenerator generator = new TokenTextSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 
0; i < count; ++i) + { + TokenTextSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenTextTableTypeV1Row(0, 0, string.Empty)); + } + + return result; + } + + public static DataTable GenerateTokenTokenCompositeSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator generator = new TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + TokenTokenCompositeSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkTokenTokenCompositeSearchParamTableTypeV1Row(0, 0, 0, string.Empty, 0, string.Empty)); + } + + return result; + } + + public static DataTable GenerateUriSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + UriSearchParamsTableBulkCopyDataGenerator generator = new UriSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + UriSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkUriSearchParamTableTypeV1Row(default, 0, string.Empty)); + } + + return result; + } + + public static DataTable GenerateCompartmentAssignmentTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + CompartmentAssignmentTableBulkCopyDataGenerator generator = new CompartmentAssignmentTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + CompartmentAssignmentTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkCompartmentAssignmentTableTypeV1Row(0, 1, string.Empty)); + } + + return result; + } + + public static DataTable GenerateResourceWriteClaimTable(int 
count, long startSurrogatedId, short resoureType, string resourceId = null) + { + ResourceWriteClaimTableBulkCopyDataGenerator generator = new ResourceWriteClaimTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + ResourceWriteClaimTableBulkCopyDataGenerator.FillDataTable(result, startSurrogatedId + i, new BulkResourceWriteClaimTableTypeV1Row(0, 1, string.Empty)); + } + + return result; + } + + public static DataTable GenerateInValidUriSearchParamsTable(int count, long startSurrogatedId, short resoureType, string resourceId = null) + { + UriSearchParamsTableBulkCopyDataGenerator generator = new UriSearchParamsTableBulkCopyDataGenerator(); + + DataTable result = generator.GenerateDataTable(); + + for (int i = 0; i < count; ++i) + { + UriSearchParamsTableBulkCopyDataGenerator.FillDataTable(result, resoureType, startSurrogatedId + i, new BulkUriSearchParamTableTypeV1Row(default, 0, null)); + } + + return result; + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestSqlServerTransientFaultRetryPolicyFactory.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestSqlServerTransientFaultRetryPolicyFactory.cs new file mode 100644 index 0000000000..2d4226d744 --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Import/TestSqlServerTransientFaultRetryPolicyFactory.cs @@ -0,0 +1,18 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using Microsoft.Health.SqlServer.Features.Client; +using Polly; + +namespace Microsoft.Health.Fhir.Shared.Tests.Integration.Features.Operations.Import +{ + public class TestSqlServerTransientFaultRetryPolicyFactory : ISqlServerTransientFaultRetryPolicyFactory + { + public IAsyncPolicy Create() + { + return Policy.TimeoutAsync(60); + } + } +} diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems index 6817c3c85f..1346c03465 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems @@ -16,8 +16,13 @@ + + + + + diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/FhirStorageTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/FhirStorageTests.cs index 4936e4fc6f..75765ccc58 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/FhirStorageTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/FhirStorageTests.cs @@ -6,7 +6,6 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; @@ -30,7 +29,6 @@ using Microsoft.Health.Fhir.Tests.Common; using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; using Microsoft.Health.Test.Utilities; -using NSubstitute; using Xunit; using Task = System.Threading.Tasks.Task; diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerFhirStorageTestsFixture.cs 
b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerFhirStorageTestsFixture.cs index 3e932b3cd9..42d0ec8fe0 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerFhirStorageTestsFixture.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerFhirStorageTestsFixture.cs @@ -106,6 +106,7 @@ internal SqlServerFhirStorageTestsFixture(int maximumSupportedSchemaVersion, str sqlConnectionStringProvider, Substitute.For(), NullLogger.Instance); + SqlServerFhirModel = sqlServerFhirModel; var searchParameterToSearchValueTypeMap = new SearchParameterToSearchValueTypeMap(); @@ -151,6 +152,7 @@ internal SqlServerFhirStorageTestsFixture(int maximumSupportedSchemaVersion, str bulkReindexResourceTvpGenerator, options, SqlConnectionWrapperFactory, + new CompressedRawResourceConverter(), NullLogger.Instance, schemaInformation); @@ -188,6 +190,7 @@ internal SqlServerFhirStorageTestsFixture(int maximumSupportedSchemaVersion, str SqlConnectionWrapperFactory, schemaInformation, fhirRequestContextAccessor, + new CompressedRawResourceConverter(), NullLogger.Instance); ISearchParameterSupportResolver searchParameterSupportResolver = Substitute.For(); @@ -213,6 +216,8 @@ internal SqlServerFhirStorageTestsFixture(int maximumSupportedSchemaVersion, str internal SqlServerSearchParameterStatusDataStore SqlServerSearchParameterStatusDataStore { get; } + internal SqlServerFhirModel SqlServerFhirModel { get; } + public async Task InitializeAsync() { await _testHelper.CreateAndInitializeDatabase(_databaseName, _maximumSupportedSchemaVersion, forceIncrementalSchemaUpgrade: false, _schemaInitializer, CancellationToken.None); diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerSchemaUpgradeTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerSchemaUpgradeTests.cs index 213da925b8..d7296fa3da 100644 --- 
a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerSchemaUpgradeTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerSchemaUpgradeTests.cs @@ -199,6 +199,8 @@ private bool CompareDatabaseSchemas(string databaseName1, string databaseName2) ("Procedure", "[dbo].[UpsertResource]"), ("Procedure", "[dbo].[UpsertResource_2]"), ("Procedure", "[dbo].[UpsertResource_3]"), + ("Procedure", "[dbo].[CreateTask]"), + ("Procedure", "[dbo].[GetNextTask]"), ("Procedure", "[dbo].[HardDeleteResource]"), ("TableType", "[dbo].[ReferenceSearchParamTableType_1]"), ("TableType", "[dbo].[ReferenceTokenCompositeSearchParamTableType_1]"), diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskConsumerTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskConsumerTests.cs index c5af56ecec..aa5c4f331c 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskConsumerTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskConsumerTests.cs @@ -21,6 +21,7 @@ namespace Microsoft.Health.Fhir.Tests.Integration.Persistence { public class SqlServerTaskConsumerTests : IClassFixture { + private const short SqlServerTaskConsumerTestsTypeId = 101; private SqlServerFhirStorageTestsFixture _fixture; public SqlServerTaskConsumerTests(SqlServerFhirStorageTestsFixture fixture) @@ -47,18 +48,17 @@ public async Task GivenListOfTasksInQueue_WhenGetNextTask_ThenAvailableTasksShou for (int i = 0; i < 5; ++i) { string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, 
CancellationToken.None); } var result = (await sqlServerTaskConsumer.GetNextMessagesAsync(3, 60, CancellationToken.None)).ToList(); @@ -82,18 +82,17 @@ public async Task GivenACompletedTask_WhenGetNextTask_ThenNoTaskShouldBeReturned SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); TaskResultData result = new TaskResultData(TaskResult.Success, "Result"); @@ -122,18 +121,17 @@ public async Task GivenARunningTask_WhenGetNextTask_ThenNoTaskShouldBeReturned() SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); _ = await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).FirstOrDefault(); @@ -157,18 +155,17 @@ public async Task GivenARunningTaskTimeout_WhenGetNextTask_ThenTaskShouldBeRetur 
SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); _ = await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None); await Task.Delay(TimeSpan.FromSeconds(3)); @@ -193,18 +190,17 @@ public async Task GivenARunningTask_WhenResetTask_ThenTaskShouldBeReturned() SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); string firstRunId = taskInfo.RunId; @@ -234,18 +230,17 @@ public async Task GivenARunningTask_WhenCompleteTask_ThenTaskStatusShouldBeChang SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + 
TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); TaskResultData result = new TaskResultData(TaskResult.Success, "Result"); @@ -271,19 +266,18 @@ public async Task GivenARunningTask_WhenReachMaxRetryCount_ThenResetShouldFail() SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, MaxRetryCount = 1, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); TaskResultData result = new TaskResultData(TaskResult.Fail, "Result"); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); @@ -313,19 +307,18 @@ public async Task GivenCompletedTask_WhenResetTask_ThenResetShouldFail() SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, MaxRetryCount = 1, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, 
CancellationToken.None); TaskResultData result = new TaskResultData(TaskResult.Fail, "Result"); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); @@ -350,19 +343,18 @@ public async Task GivenARunningTask_WhenUpdateWithWrongRunid_ThenExceptionShould SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, InputData = inputData, MaxRetryCount = 1, }; - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); TaskResultData result = new TaskResultData(TaskResult.Fail, "Result"); taskInfo = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); @@ -370,5 +362,40 @@ public async Task GivenARunningTask_WhenUpdateWithWrongRunid_ThenExceptionShould await Assert.ThrowsAsync(async () => await sqlServerTaskConsumer.CompleteAsync(taskInfo.TaskId, result, "invalid", CancellationToken.None)); await Assert.ThrowsAsync(async () => await sqlServerTaskConsumer.ResetAsync(taskInfo.TaskId, result, "invalid", CancellationToken.None)); } + + [Fact] + public async Task GivenATaskCreated_WhenTaskCanceled_ThenCanBePickedUp() + { + string queueId = Guid.NewGuid().ToString(); + TaskHostingConfiguration config = new TaskHostingConfiguration() + { + Enabled = true, + QueueId = queueId, + TaskHeartbeatTimeoutThresholdInSeconds = 60, + }; + + IOptions taskHostingConfig = Substitute.For>(); + taskHostingConfig.Value.Returns(config); + SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); + 
SqlServerTaskConsumer sqlServerTaskConsumer = new SqlServerTaskConsumer(taskHostingConfig, _fixture.SqlConnectionWrapperFactory, NullLogger.Instance); + + string taskId = Guid.NewGuid().ToString(); + string inputData = "inputData"; + + TaskInfo taskInfo = new TaskInfo() + { + TaskId = taskId, + QueueId = queueId, + TaskTypeId = SqlServerTaskConsumerTestsTypeId, + InputData = inputData, + MaxRetryCount = 1, + }; + + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); + _ = await sqlServerTaskManager.CancelTaskAsync(taskInfo.TaskId, CancellationToken.None); + + var taskInfoResult = (await sqlServerTaskConsumer.GetNextMessagesAsync(1, 60, CancellationToken.None)).First(); + Assert.Equal(taskInfo.TaskId, taskInfoResult.TaskId); + } } } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskManagerTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskManagerTests.cs index 07f110368d..7e20599db4 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskManagerTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerTaskManagerTests.cs @@ -7,8 +7,11 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Health.Fhir.Core.Configs; using Microsoft.Health.Fhir.SqlServer.Features.Storage; using Microsoft.Health.TaskManagement; +using NSubstitute; using Xunit; using TaskStatus = Microsoft.Health.TaskManagement.TaskStatus; @@ -16,6 +19,7 @@ namespace Microsoft.Health.Fhir.Tests.Integration.Persistence { public class SqlServerTaskManagerTests : IClassFixture { + private const short SqlServerTaskManagerTestsTypeId = 100; private SqlServerFhirStorageTestsFixture _fixture; public SqlServerTaskManagerTests(SqlServerFhirStorageTestsFixture fixture) @@ -28,23 +32,22 @@ public async Task 
GivenASqlTaskManager_WhenCreateTask_ThenNewTaskShouldBeCreated { string queueId = Guid.NewGuid().ToString(); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskManagerTestsTypeId, InputData = inputData, }; SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); - taskInfo = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + taskInfo = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); Assert.Equal(queueId, taskInfo.QueueId); Assert.Equal(taskId, taskInfo.TaskId); - Assert.Equal(typeId, taskInfo.TaskTypeId); + Assert.Equal(SqlServerTaskManagerTestsTypeId, taskInfo.TaskTypeId); Assert.Equal(inputData, taskInfo.InputData); Assert.Equal(TaskStatus.Queued, taskInfo.Status); Assert.NotNull(taskInfo.HeartbeatDateTime); @@ -57,7 +60,7 @@ public async Task GivenASqlTaskManager_WhenCreateTask_ThenNewTaskShouldBeCreated taskInfo = await sqlServerTaskManager.GetTaskAsync(taskId, CancellationToken.None); Assert.Equal(queueId, taskInfo.QueueId); Assert.Equal(taskId, taskInfo.TaskId); - Assert.Equal(typeId, taskInfo.TaskTypeId); + Assert.Equal(SqlServerTaskManagerTestsTypeId, taskInfo.TaskTypeId); Assert.Equal(inputData, taskInfo.InputData); Assert.Equal(TaskStatus.Queued, taskInfo.Status); Assert.NotNull(taskInfo.HeartbeatDateTime); @@ -68,26 +71,66 @@ public async Task GivenASqlTaskManager_WhenCreateTask_ThenNewTaskShouldBeCreated Assert.Null(taskInfo.Result); } + [Fact] + public async Task GivenActiveTasks_WhenCreateWithSameTypeRunningTask_ThenConflictExceptionShouldBeThrow() + { + string queueId = Guid.NewGuid().ToString(); + string taskId1 = Guid.NewGuid().ToString(); + string taskId2 = Guid.NewGuid().ToString(); + string inputData = "inputData"; + + TaskHostingConfiguration 
config = new TaskHostingConfiguration() + { + Enabled = true, + QueueId = queueId, + TaskHeartbeatTimeoutThresholdInSeconds = 60, + }; + + short conflictTestTypeId = 1000; + + IOptions taskHostingConfig = Substitute.For>(); + taskHostingConfig.Value.Returns(config); + SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); + + TaskInfo taskInfo1 = new TaskInfo() + { + TaskId = taskId1, + QueueId = queueId, + TaskTypeId = conflictTestTypeId, + InputData = inputData, + }; + + TaskInfo taskInfo2 = new TaskInfo() + { + TaskId = taskId2, + QueueId = queueId, + TaskTypeId = conflictTestTypeId, + InputData = inputData, + }; + + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo1, true, CancellationToken.None); + await Assert.ThrowsAnyAsync(() => sqlServerTaskManager.CreateTaskAsync(taskInfo2, true, CancellationToken.None)); + } + [Fact] public async Task GivenASqlTaskManager_WhenCreate2TaskWithSameTaskId_ThenNewTaskShouldBeCreated() { string queueId = Guid.NewGuid().ToString(); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskManagerTestsTypeId, InputData = inputData, }; SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); - taskInfo = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + taskInfo = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); - await Assert.ThrowsAsync(async () => await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None)); + await Assert.ThrowsAsync(async () => await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None)); } [Fact] @@ -95,22 +138,80 @@ public async Task 
GivenASqlTaskManager_WhenCancelTask_ThenTaskStatusShouldBeChan { string queueId = Guid.NewGuid().ToString(); string taskId = Guid.NewGuid().ToString(); - short typeId = 1; string inputData = "inputData"; TaskInfo taskInfo = new TaskInfo() { TaskId = taskId, QueueId = queueId, - TaskTypeId = typeId, + TaskTypeId = SqlServerTaskManagerTestsTypeId, InputData = inputData, }; SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); - _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, CancellationToken.None); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, false, CancellationToken.None); TaskInfo canceledTask = await sqlServerTaskManager.CancelTaskAsync(taskId, CancellationToken.None); Assert.True(canceledTask.IsCanceled); } + + [Fact] + public async Task GivenASqlTaskManager_WhenCreateTaskByTypeTwice_ConflictShouldBeReturned() + { + string queueId = Guid.NewGuid().ToString(); + string inputData = "inputData"; + short uniqueType = 1234; + + TaskInfo taskInfo = new TaskInfo() + { + TaskId = Guid.NewGuid().ToString(), + QueueId = queueId, + TaskTypeId = uniqueType, + InputData = inputData, + }; + + SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, true, CancellationToken.None); + + taskInfo = new TaskInfo() + { + TaskId = Guid.NewGuid().ToString(), + QueueId = queueId, + TaskTypeId = uniqueType, + InputData = inputData, + }; + + await Assert.ThrowsAnyAsync(async () => await sqlServerTaskManager.CreateTaskAsync(taskInfo, true, CancellationToken.None)); + } + + [Fact] + public async Task GivenATaskCanceledButNotComplete_WhenCreateTaskBySameType_ConflictShouldBeReturned() + { + string queueId = Guid.NewGuid().ToString(); + string inputData = "inputData"; + short uniqueType = 4321; + + TaskInfo taskInfo = new TaskInfo() + { + TaskId = 
Guid.NewGuid().ToString(), + QueueId = queueId, + TaskTypeId = uniqueType, + InputData = inputData, + }; + + SqlServerTaskManager sqlServerTaskManager = new SqlServerTaskManager(_fixture.SqlConnectionWrapperFactory, NullLogger.Instance); + _ = await sqlServerTaskManager.CreateTaskAsync(taskInfo, true, CancellationToken.None); + _ = await sqlServerTaskManager.CancelTaskAsync(taskInfo.TaskId, CancellationToken.None); + + taskInfo = new TaskInfo() + { + TaskId = Guid.NewGuid().ToString(), + QueueId = queueId, + TaskTypeId = uniqueType, + InputData = inputData, + }; + + await Assert.ThrowsAnyAsync(async () => await sqlServerTaskManager.CreateTaskAsync(taskInfo, true, CancellationToken.None)); + } } } diff --git a/testauthenvironment.json b/testauthenvironment.json index e4aa766b79..b13bf90a4f 100644 --- a/testauthenvironment.json +++ b/testauthenvironment.json @@ -24,6 +24,12 @@ "globalConverter" ] }, + { + "id": "globalImporterUser", + "roles": [ + "globalImporter" + ] + }, { "id": "globalAdminUser", "roles": [