diff --git a/.github/actions/analyze.yml b/.github/actions/analyze.yml
new file mode 100644
index 0000000000..5ad0110a60
--- /dev/null
+++ b/.github/actions/analyze.yml
@@ -0,0 +1,201 @@
+parameters:
+ analyzeARMTemplates: true
+ analyzeBinaries: true
+ analyzePackages: true
+ runAntiMalware: true
+ credScanDirectory: '$(Build.SourcesDirectory)'
+
+steps:
+- task: UseDotNet@2
+ displayName: Use .NET SDK
+ inputs:
+ useGlobalJson: true
+
+- ${{ if eq(parameters.analyzeBinaries, 'true') }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: 'Download Binaries'
+ inputs:
+ buildType: 'current'
+ downloadType: 'single'
+ downloadPath: '$(Agent.TempDirectory)/artifacts'
+ artifactName: 'deploy'
+
+- ${{ if eq(parameters.analyzePackages, 'true') }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: 'Download NuGet Packages'
+ inputs:
+ buildType: 'current'
+ downloadType: 'single'
+ downloadPath: '$(Build.SourcesDirectory)/artifacts'
+ artifactName: 'nuget'
+
+- ${{ if eq(parameters.analyzeBinaries, 'true') }}:
+ - task: ExtractFiles@1
+ displayName: 'Extract Stu3 Web Server Binaries'
+ inputs:
+ archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.Stu3.Web.zip'
+ destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/Stu3'
+ - task: ExtractFiles@1
+ displayName: 'Extract R4 Web Server Binaries'
+ inputs:
+ archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.R4.Web.zip'
+ destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/r4'
+ - task: ExtractFiles@1
+ displayName: 'Extract R4B Web Server Binaries'
+ inputs:
+ archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.R4B.Web.zip'
+ destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/r4b'
+ - task: ExtractFiles@1
+ displayName: 'Extract R5 Web Server Binaries'
+ inputs:
+ archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.R5.Web.zip'
+ destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/r5'
+
+- ${{ if eq(parameters.runAntiMalware, 'true') }}:
+ - task: AntiMalware@4
+ inputs:
+ InputType: 'Basic'
+ ScanType: 'CustomScan'
+ FileDirPath: '$(Build.SourcesDirectory)'
+ EnableServices: true
+ TreatSignatureUpdateFailureAs: 'Standard'
+ SignatureFreshness: 'OneDay'
+ TreatStaleSignatureAs: 'Error'
+
+- ${{ if eq(parameters.analyzeARMTemplates, 'true') }}:
+ - task: Armory@2
+ inputs:
+ targetDirectory: '$(Build.SourcesDirectory)/samples/templates'
+ targetFiles: 'f|*.json'
+ excludePassesFromLog: false
+
+ - task: TemplateAnalyzer@3
+ displayName: 'Run Template Analyzer'
+ inputs:
+ ToolVersion: Latest
+ AnalyzeDirectory: '$(Build.SourcesDirectory)/samples/templates'
+ Verbose: false
+ IncludeNonSecurityRules: true
+
+- task: CredScan@3
+ inputs:
+ scanFolder: ${{ parameters.credScanDirectory }}
+ outputFormat: 'pre'
+ suppressionsFile: 'CredScanSuppressions.json'
+ verboseOutput: true
+
+- task: CSRF@1
+ inputs:
+ Path: '$(Build.SourcesDirectory)'
+ ToolVersion: Latest
+
+- task: Trivy@1
+ displayName: 'Run Trivy'
+ inputs:
+ Target: '$(Build.SourcesDirectory)/build/docker'
+ Severities: all
+ VulTypes: all
+
+- task: PSScriptAnalyzer@1
+ displayName: 'Run PSScriptAnalyzer'
+ inputs:
+ Path: '$(Build.SourcesDirectory)'
+ Settings: required
+ IgnorePattern: .gdn
+ Recurse: true
+
+- task: RoslynAnalyzers@3
+ inputs:
+ userProvideBuildInfo: 'msBuildInfo'
+ msBuildArchitecture: 'DotNetCore'
+ msBuildCommandline: 'dotnet build $(Build.SourcesDirectory)/Microsoft.Health.Fhir.sln --configuration $(buildConfiguration) -p:ContinuousIntegrationBuild=true -f net8.0'
+
+- task: BinSkim@4
+ inputs:
+ toolVersion: Latest
+ InputType: Basic
+ Function: analyze
+ AnalyzeTargetGlob: 'f|$(Agent.TempDirectory)/artifacts/**/*Microsoft.Health.*.dll'
+
+ ## PoliCheck@2 does not need to be added since it is run internally
+
+ ## Tools that are no longer supported:
+ # AutoApplicability@1, CodeMetrics@1, VulnerabilityAssessment@0
+
+- task: SdtReport@2
+ condition: succeededOrFailed()
+ continueOnError: True
+ inputs:
+ GdnExportAllTools: false
+ GdnExportGdnToolArmory: ${{ eq(parameters.analyzeARMTemplates, 'true') }}
+ GdnExportGdnToolCredScan: true
+ GdnExportGdnToolCSRF: true
+ GdnExportGdnToolRoslynAnalyzers: true
+ BinSkim: true
+ CredScan: true
+
+- task: PublishSecurityAnalysisLogs@3
+ condition: succeededOrFailed()
+ continueOnError: True
+ inputs:
+ ArtifactName: 'CodeAnalysisLogs'
+ ArtifactType: 'Container'
+ AllTools: false
+ AntiMalware: ${{ eq(parameters.runAntiMalware, 'true') }}
+ APIScan: false
+ Armory: ${{ eq(parameters.analyzeARMTemplates, 'true') }}
+ Bandit: false
+ BinSkim: false
+ CodesignValidation: false
+ CredScan: true
+ CSRF: true
+ ESLint: false
+ Flawfinder: false
+ FortifySCA: false
+ FxCop: false
+ ModernCop: false
+ MSRD: false
+ PoliCheck: false
+ RoslynAnalyzers: true
+ SDLNativeRules: false
+ Semmle: false
+ SpotBugs: false
+ TSLint: false
+ WebScout: false
+ ToolLogsNotFoundAction: 'Standard'
+
+- task: PostAnalysis@2
+ condition: succeededOrFailed()
+ inputs:
+ GdnBreakAllTools: false
+ GdnBreakGdnToolArmory: ${{ eq(parameters.analyzeARMTemplates, 'true') }}
+ GdnBreakGdnToolCredScan: true
+ GdnBreakGdnToolCSRF: true
+ GdnBreakGdnToolRoslynAnalyzers: true
+ BinSkim: true
+ CredScan: true
+
+- task: TSAUpload@2
+ condition: and(succeeded(), eq(variables['build.sourceBranch'], 'refs/heads/main'))
+ displayName: 'TSA upload'
+ inputs:
+ tsaVersion: 'TsaV2'
+ codebase: 'NewOrUpdate'
+ GdnPublishTsaOnboard: false
+ GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\jobs\tsaconfig.gdntsa'
+ GdnPublishTsaExportedResultsPublishable: true
+
+- task: DeleteFiles@1
+ displayName: 'Delete files to make space'
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: '**\*'
+
+- task: DropValidatorTask@0
+ displayName: 'SBOM Validator and Publisher Task'
+ inputs:
+ BuildDropPath: '$(Agent.TempDirectory)/artifacts/deploy'
+ OutputPath: 'output.json'
+ ValidateSignature: true
+ Verbosity: 'Verbose'
+ continueOnError: true
diff --git a/.github/actions/build.yml b/.github/actions/build.yml
new file mode 100644
index 0000000000..8bb12db04d
--- /dev/null
+++ b/.github/actions/build.yml
@@ -0,0 +1,88 @@
+parameters:
+ # Default values
+ unitTest: true
+ codeCoverage: false
+ componentGovernance: false
+ packageArtifacts: false
+ packageIntegrationTests: false
+ targetBuildFramework: ''
+
+steps:
+- task: UseDotNet@2
+ displayName: 'Use .NET SDK'
+ inputs:
+ useGlobalJson: true
+
+- ${{ if eq(parameters.targetBuildFramework, '') }}:
+ - task: DotNetCoreCLI@2
+ displayName: 'dotnet build $(buildConfiguration)'
+ inputs:
+ command: build
+ arguments: '--configuration $(buildConfiguration) -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="$(assemblySemVer)" -p:FileVersion="$(assemblySemFileVer)" -p:InformationalVersion="$(informationalVersion)" -p:Version="$(majorMinorPatch)" -warnaserror'
+ workingDirectory: $(System.DefaultWorkingDirectory)
+
+- ${{ if ne(parameters.targetBuildFramework, '') }}:
+ - task: DotNetCoreCLI@2
+ displayName: 'dotnet build $(buildConfiguration)'
+ inputs:
+ command: build
+ arguments: '--configuration $(buildConfiguration) -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="$(assemblySemVer)" -p:FileVersion="$(assemblySemFileVer)" -p:InformationalVersion="$(informationalVersion)" -p:Version="$(majorMinorPatch)" -warnaserror -f ${{parameters.targetBuildFramework}}'
+ workingDirectory: $(System.DefaultWorkingDirectory)
+
+- ${{ if eq(parameters.unitTest, 'true') }}:
+ - task: DotNetCoreCLI@2
+ displayName: 'dotnet test'
+ inputs:
+ command: test
+ projects: '**/*UnitTests/*.csproj'
+ arguments: '--configuration $(buildConfiguration) --no-build -f ${{parameters.targetBuildFramework}}'
+ testRunTitle: 'Unit Tests'
+
+- ${{ if eq(parameters.codeCoverage, 'true') }}:
+ - task: DotNetCoreCLI@2
+ displayName: 'dotnet test with coverage'
+ inputs:
+ command: test
+ projects: '**/*UnitTests/*.csproj'
+ arguments: '--configuration $(buildConfiguration) --no-build --collect "XPlat Code Coverage" -s "$(build.sourcesDirectory)/CodeCoverage.runsettings" -v normal -f ${{parameters.targetBuildFramework}}'
+ testRunTitle: 'Unit Tests'
+ - task: reportgenerator@5
+ displayName: 'aggregate code coverage'
+ condition: succeededOrFailed()
+ inputs:
+ reports: '$(Agent.TempDirectory)/*/coverage.cobertura.xml'
+ reporttypes: 'Cobertura'
+ targetdir: '$(Agent.TempDirectory)/coverage'
+ - task: PublishCodeCoverageResults@1
+ displayName: 'publish code coverage'
+ condition: succeededOrFailed()
+ inputs:
+ codeCoverageTool: 'Cobertura'
+ failIfCoverageEmpty: true
+ summaryFileLocation: '$(Agent.TempDirectory)/coverage/Cobertura.xml'
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish Cobertura.xml'
+ inputs:
+ pathToPublish: '$(Agent.TempDirectory)/coverage/Cobertura.xml'
+ artifactName: 'IntegrationTests'
+ artifactType: 'container'
+
+- ${{ if eq(parameters.packageArtifacts, 'true') }}:
+ # https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/secure-supply-chain/ado-sbom-generator
+ - task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'SBOM Generation Task'
+ inputs:
+ BuildDropPath: '$(build.artifactStagingDirectory)'
+ BuildComponentPath: '$(Build.SourcesDirectory)'
+ - task: PublishBuildArtifacts@1
+ displayName: 'Publish SBOM Artifacts'
+ inputs:
+ pathToPublish: '$(build.artifactStagingDirectory)'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+- ${{ if eq(parameters.packageArtifacts, 'true') }}:
+ - template: package.yml
+
+- ${{ if eq(parameters.packageIntegrationTests, 'true') }}:
+ - template: package-integration-tests.yml
diff --git a/.github/actions/clean-storage-accounts/action.yml b/.github/actions/clean-storage-accounts/action.yml
new file mode 100644
index 0000000000..35df072a4c
--- /dev/null
+++ b/.github/actions/clean-storage-accounts/action.yml
@@ -0,0 +1,30 @@
+name: clean storage accounts
+description: Removes blob containers from test storage accounts
+
+inputs:
+ environmentName:
+ description: Deployment environment name
+ required: true
+
+runs:
+ using: 'composite'
+ steps:
+ - name: Clean Storage Accounts
+ uses: azure/powershell@v2
+ with:
+ azPSVersion: "latest"
+ inlineScript: |
+ $currentUtcTime = [DateTime]::UtcNow
+ Get-AzContext
+ $storageAccounts = Get-AzStorageAccount -ResourceGroupName ${{ inputs.environmentName }}
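+ # Delete any blob container that has not been modified in the last 3 days.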
+ foreach ($storageAccount in $storageAccounts) {
+
+ $storageContainers = Get-AzStorageContainer -Name * -Context $storageAccount.Context
+ foreach ($container in $storageContainers) {
+ $ageDiff = $currentUtcTime - $container.CloudBlobContainer.Properties.LastModified.UtcDateTime
+ if($ageDiff.TotalDays -ge 3) {
+ Write-Host "Deleting container $($container.Name)"
+ $container.CloudBlobContainer.Delete()
+ }
+ }
+ }
diff --git a/.github/actions/cleanup-integration-test-databases/action.yml b/.github/actions/cleanup-integration-test-databases/action.yml
new file mode 100644
index 0000000000..993c861dfb
--- /dev/null
+++ b/.github/actions/cleanup-integration-test-databases/action.yml
@@ -0,0 +1,25 @@
+name: cleanup integration test databases
+description: Deletes databases used for integration tests from previous runs
+
+inputs:
+ environmentName:
+ description: Deployment environment name
+ required: true
+
+runs:
+ using: 'composite'
+ steps:
+ - name: Remove Integration Test Databases
+ uses: azure/powershell@v1
+ with:
+ azPSVersion: "latest"
+ inlineScript: |
+ Get-AzContext
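+ # Remove leftover integration-test databases whose names match the known test prefixes.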
+ $testNamePatterns = @("SNAPSHOT*","FHIRCOMPATIBILITYTEST*","FHIRINTEGRATIONTEST*","FHIRRESOURCECHANGEDISABLEDTEST*","BASE*","SNAPSHOT*")
+ foreach ($pattern in $testNamePatterns) {
+ $resources = Get-AzResource -ResourceGroupName ${{ inputs.environmentName }} -ResourceType 'Microsoft.Sql/servers/databases' -Name $pattern
+ foreach ($resource in $resources) {
+ Write-Host "Cleaning up $($resource.ResourceName)"
+ Remove-AzResource -ResourceId $resource.ResourceId -Force
+ }
+ }
diff --git a/.github/actions/docker-add-tag.yml b/.github/actions/docker-add-tag.yml
new file mode 100644
index 0000000000..031ae5e484
--- /dev/null
+++ b/.github/actions/docker-add-tag.yml
@@ -0,0 +1,28 @@
+
+parameters:
+- name: sourceTag
+ type: string
+- name: targetTag
+ type: string
+
+jobs:
+- job: DockerAddTag
+ pool:
+ name: '$(DefaultLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - task: AzureCLI@2
+ displayName: 'Azure CLI: InlineScript'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ scriptType: bash
+ scriptLocation: inlineScript
+ inlineScript: |
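+ # For each supported FHIR version (stu3, r4, r4b, r5), pull the image with the source tag, retag it, and push the new tag.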
+ az acr login -n $(azureContainerRegistry)
+ for v in stu3 r4 r4b r5; do
+ sourceImage="$(azureContainerRegistry)/${v}_fhir-server:${{parameters.sourceTag}}"
+ targetImage="$(azureContainerRegistry)/${v}_fhir-server:${{parameters.targetTag}}"
+ docker pull $sourceImage
+ docker tag $sourceImage $targetImage
+ docker push $targetImage
+ done
diff --git a/.github/actions/docker-add-tag/action.yml b/.github/actions/docker-add-tag/action.yml
new file mode 100644
index 0000000000..68d89b1835
--- /dev/null
+++ b/.github/actions/docker-add-tag/action.yml
@@ -0,0 +1,36 @@
+name: Docker Add Tag
+description: 'Adds a new tag to the image for the given FHIR version'
+
+inputs:
+ sourceTag:
+ description: 'The existing tag on the source image'
+ required: true
+ targetTag:
+ description: 'The new tag to apply to the image'
+ required: true
+ fhirSchemaVersion:
+ description: 'The FHIR schema version of the image to retag'
+ required: true
+ azureContainerRegistry:
+ description: 'The Azure Container Registry to push the images to'
+ required: true
+
+runs:
+ using: 'composite'
+ steps:
+ - name: Azure Login
+ uses: azure/login@v2
+ with:
+ client-id: ${{secrets.AZURE_CLIENT_ID}}
+ subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}}
+ tenant-id: ${{secrets.AZURE_TENANT_ID}}
+ enable-AzPSSession: true
+ - name: Add Tag to Docker Images
+ shell: bash
+ run: |
+ az acr login -n ${{inputs.azureContainerRegistry}}
+ sourceImage="${{inputs.azureContainerRegistry}}/${{inputs.fhirSchemaVersion}}_fhir-server:${{inputs.sourceTag}}"
+ targetImage="${{inputs.azureContainerRegistry}}/${{inputs.fhirSchemaVersion}}_fhir-server:${{inputs.targetTag}}"
+ docker pull $sourceImage
+ docker tag $sourceImage $targetImage
+ docker push $targetImage
diff --git a/.github/actions/docker-build-all.yml b/.github/actions/docker-build-all.yml
new file mode 100644
index 0000000000..3f6e766c53
--- /dev/null
+++ b/.github/actions/docker-build-all.yml
@@ -0,0 +1,27 @@
+# DESCRIPTION:
+# Builds and pushes images for all supported FHIR versions
+
+parameters:
+- name: tag
+ type: string
+
+jobs:
+- template: docker-build-push.yml
+ parameters:
+ version: "R4"
+ tag: ${{parameters.tag}}
+
+- template: docker-build-push.yml
+ parameters:
+ version: "R4B"
+ tag: ${{parameters.tag}}
+
+- template: docker-build-push.yml
+ parameters:
+ version: "Stu3"
+ tag: ${{parameters.tag}}
+
+- template: docker-build-push.yml
+ parameters:
+ version: "R5"
+ tag: ${{parameters.tag}}
diff --git a/.github/actions/docker-build-push.yml b/.github/actions/docker-build-push.yml
new file mode 100644
index 0000000000..77cacb2f94
--- /dev/null
+++ b/.github/actions/docker-build-push.yml
@@ -0,0 +1,40 @@
+# DESCRIPTION:
+# Builds and pushes a docker image for a given FHIR version
+
+parameters:
+- name: version
+ type: string
+- name: tag
+ type: string
+
+jobs:
+- job: '${{parameters.version}}_Docker'
+ pool:
+ name: '$(DefaultLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - task: DockerCompose@0
+ displayName: 'Build FHIR ${{parameters.version}} Server Image'
+ inputs:
+ action: Build services
+ azureSubscriptionEndpoint: $(azureSubscriptionEndpoint)
+ azureContainerRegistry: $(azureContainerRegistry)
+ dockerComposeFile: $(composeLocation)
+ dockerComposeFileArgs: |
+ FHIR_VERSION=${{parameters.version}}
+ ASSEMBLY_VER=$(assemblySemFileVer)
+ projectName: ${{parameters.version}}
+ additionalImageTags: ${{parameters.tag}}
+
+ - task: DockerCompose@0
+ displayName: 'Push FHIR ${{parameters.version}} Server Image'
+ inputs:
+ action: Push services
+ azureSubscriptionEndpoint: $(azureSubscriptionEndpoint)
+ azureContainerRegistry: $(azureContainerRegistry)
+ dockerComposeFile: $(composeLocation)
+ dockerComposeFileArgs: |
+ FHIR_VERSION=${{parameters.version}}
+ ASSEMBLY_VER=$(assemblySemFileVer)
+ projectName: ${{parameters.version}}
+ additionalImageTags: ${{parameters.tag}}
diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml
new file mode 100644
index 0000000000..1ce5a0ec13
--- /dev/null
+++ b/.github/actions/docker-build/action.yml
@@ -0,0 +1,23 @@
+name: Docker Build
+description: 'Builds the image for a given FHIR version'
+
+inputs:
+ fhirSchemaVersion:
+ description: 'The FHIR schema version to package'
+ required: true
+ assemblyVersion:
+ description: 'The assembly version to use'
+ required: true
+ composeLocation:
+ description: 'The location of the docker-compose file'
+ required: true
+
+runs:
+ using: 'composite'
+ steps:
+ - name: Build Docker Image
+ shell: bash
+ run: |
+ echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}"
+ cd build/docker
+ docker-compose build --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}}
diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml
new file mode 100644
index 0000000000..01d875b7e3
--- /dev/null
+++ b/.github/actions/dotnet-build/action.yml
@@ -0,0 +1,29 @@
+name: dotnet build
+description: Builds the packages and ensures their quality by running tests.
+inputs:
+ assemblyVersion:
+ description: The assembly version for the build.
+ required: true
+ buildConfiguration:
+ default: Debug
+ description: The dotnet build configuration.
+ required: false
+ fileVersion:
+ description: The assembly file version for the build.
+ required: true
+ informationalVersion:
+ description: The assembly informational version for the build.
+ required: true
+ majorMinorPatch:
+ description: The major.minor.patch version to use.
+ required: true
+ dotnet-version:
+ description: 'The version of the .NET SDK to use'
+ required: true
+ default: '8.0.202' # Default version if not specified
+runs:
+ using: composite
+ steps:
+ - name: Build
+ shell: bash
+ run: dotnet build "./Microsoft.Health.Fhir.sln" --configuration ${{inputs.buildConfiguration}} "-p:ContinuousIntegrationBuild=true;AssemblyVersion=${{inputs.assemblyVersion}};FileVersion=${{inputs.fileVersion}};InformationalVersion=${{inputs.informationalVersion}};Version=${{inputs.majorMinorPatch}}" -warnaserror
diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml
new file mode 100644
index 0000000000..26f17de674
--- /dev/null
+++ b/.github/actions/dotnet-test/action.yml
@@ -0,0 +1,12 @@
+name: dotnet test
+description: 'Runs the unit tests for the FHIR solution'
+inputs:
+ buildConfiguration:
+ description: 'The build configuration to use'
+ required: true
+runs:
+ using: 'composite'
+ steps:
+ - name: Run Unit Tests
+ shell: bash
+ run: dotnet test "Microsoft.Health.Fhir.sln" -p:ContinuousIntegrationBuild=true --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal
diff --git a/.github/actions/e2e-setup.yml b/.github/actions/e2e-setup.yml
new file mode 100644
index 0000000000..33890f84d2
--- /dev/null
+++ b/.github/actions/e2e-setup.yml
@@ -0,0 +1,23 @@
+steps:
+ - task: DownloadBuildArtifacts@0
+ inputs:
+ buildType: 'current'
+ downloadType: 'single'
+ downloadPath: '$(System.ArtifactsDirectory)'
+ artifactName: 'IntegrationTests'
+
+ - task: UseDotNet@2
+ inputs:
+ useGlobalJson: true
+
+ - task: AzureKeyVault@1
+ displayName: 'Azure Key Vault: resolute-oss-tenant-info'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ KeyVaultName: 'resolute-oss-tenant-info'
+
+ - task: AzureKeyVault@1
+ displayName: 'Azure Key Vault: $(DeploymentEnvironmentName)-ts'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ KeyVaultName: '$(DeploymentEnvironmentName)-ts'
diff --git a/.github/actions/e2e-tests-extract.yml b/.github/actions/e2e-tests-extract.yml
new file mode 100644
index 0000000000..1d6a11bf0e
--- /dev/null
+++ b/.github/actions/e2e-tests-extract.yml
@@ -0,0 +1,10 @@
+parameters:
+- name: version
+ type: string
+
+steps:
+ - task: ExtractFiles@1
+ displayName: 'Extract E2E Test Binaries'
+ inputs:
+ archiveFilePatterns: '$(System.ArtifactsDirectory)/IntegrationTests/Microsoft.Health.Fhir.${{ parameters.version }}.Tests.E2E.zip'
+ destinationFolder: '$(Agent.TempDirectory)/E2ETests/'
diff --git a/.github/actions/e2e-tests.yml b/.github/actions/e2e-tests.yml
new file mode 100644
index 0000000000..b69ef2351d
--- /dev/null
+++ b/.github/actions/e2e-tests.yml
@@ -0,0 +1,139 @@
+parameters:
+- name: version
+ type: string
+- name: appServiceName
+ type: string
+- name: appServiceType
+ type: string
+
+steps:
+ - template: e2e-tests-extract.yml
+ parameters:
+ version: ${{parameters.version}}
+
+ - task: AzurePowerShell@4
+ displayName: 'Set Variables'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ azurePowerShellVersion: latestVersion
+ ScriptType: inlineScript
+ Inline: |
+ $keyVault = "$(DeploymentEnvironmentName)-ts"
+ $secrets = Get-AzKeyVaultSecret -VaultName $keyVault
+
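+ # Expose each Key Vault secret as a pipeline variable, replacing '--' with '_' in the name; fail if a secret value is empty.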
+ foreach($secret in $secrets)
+ {
+ $environmentVariableName = $secret.Name.Replace("--","_")
+
+ $secretValue = Get-AzKeyVaultSecret -VaultName $keyVault -Name $secret.Name
+ # Replace with -AsPlainText flag when v5.3 of the Az Module is supported
+ $plainValue = ([System.Net.NetworkCredential]::new("", $secretValue.SecretValue).Password).ToString()
+ if([string]::IsNullOrEmpty($plainValue))
+ {
+ throw "$($secret.Name) is empty"
+ }
+ Write-Host "##vso[task.setvariable variable=$($environmentVariableName)]$($plainValue)"
+ }
+
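+ # Collect key1 for each storage account; expose each key as its own variable and also in the combined AllStorageAccounts list.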
+ $storageAccounts = Get-AzStorageAccount -ResourceGroupName $(ResourceGroupName)
+ $allStorageAccounts = ""
+ foreach ($storageAccount in $storageAccounts) {
+ $accKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name $storageAccount.StorageAccountName | Where-Object {$_.KeyName -eq "key1"}
+
+ $storageSecretName = "$($storageAccount.StorageAccountName)_secret"
+ Write-Host "##vso[task.setvariable variable=$($storageSecretName)]$($accKey.Value)"
+ $allStorageAccounts += "$($storageSecretName)|$($accKey.Value)|"
+ }
+ Write-Host "##vso[task.setvariable variable=AllStorageAccounts]$($allStorageAccounts)"
+
+ $appServiceName = "${{ parameters.appServiceName }}"
+ $appSettings = (Get-AzWebApp -ResourceGroupName $(ResourceGroupName) -Name $appServiceName).SiteConfig.AppSettings
+ $acrSettings = $appSettings | where {$_.Name -eq "FhirServer__Operations__ConvertData__ContainerRegistryServers__0"}
+ $acrLoginServer = $acrSettings[0].Value
+ $acrAccountName = ($acrLoginServer -split '\.')[0]
+ $acrPassword = (Get-AzContainerRegistryCredential -ResourceGroupName $(ResourceGroupName) -Name $acrAccountName).Password
+ Write-Host "##vso[task.setvariable variable=TestContainerRegistryServer]$($acrLoginServer)"
+ Write-Host "##vso[task.setvariable variable=TestContainerRegistryPassword]$($acrPassword)"
+
+ $exportStoreSettings = $appSettings | where {$_.Name -eq "FhirServer__Operations__Export__StorageAccountUri"}
+ $exportStoreUri = $exportStoreSettings[0].Value
+ Write-Host "$exportStoreUri"
+ $exportStoreAccountName = [System.Uri]::new("$exportStoreUri").Host.Split('.')[0]
+ $exportStoreKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name "$exportStoreAccountName" | Where-Object {$_.KeyName -eq "key1"}
+
+ Write-Host "##vso[task.setvariable variable=TestExportStoreUri]$($exportStoreUri)"
+ Write-Host "##vso[task.setvariable variable=TestExportStoreKey]$($exportStoreKey.Value)"
+
+ $integrationStoreSettings = $appSettings | where {$_.Name -eq "FhirServer__Operations__IntegrationDataStore__StorageAccountUri"}
+ $integrationStoreUri = $integrationStoreSettings[0].Value
+ Write-Host "$integrationStoreUri"
+ $integrationStoreAccountName = [System.Uri]::new("$integrationStoreUri").Host.Split('.')[0]
+ $integrationStoreKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name "$integrationStoreAccountName" | Where-Object {$_.KeyName -eq "key1"}
+
+ Write-Host "##vso[task.setvariable variable=TestIntegrationStoreUri]$($integrationStoreUri)"
+ Write-Host "##vso[task.setvariable variable=TestIntegrationStoreKey]$($integrationStoreKey.Value)"
+
+ Write-Host "##vso[task.setvariable variable=Resource]$(TestApplicationResource)"
+
+ $secrets = Get-AzKeyVaultSecret -VaultName resolute-oss-tenant-info
+
+ foreach($secret in $secrets)
+ {
+ $environmentVariableName = $secret.Name.Replace("--","_")
+
+ $secretValue = Get-AzKeyVaultSecret -VaultName resolute-oss-tenant-info -Name $secret.Name
+ # Replace with -AsPlainText flag when v5.3 of the Az Module is supported
+ $plainValue = ([System.Net.NetworkCredential]::new("", $secretValue.SecretValue).Password).ToString()
+ if([string]::IsNullOrEmpty($plainValue))
+ {
+ throw "$($secret.Name) is empty"
+ }
+ Write-Host "##vso[task.setvariable variable=$($environmentVariableName)]$($plainValue)"
+ }
+ # ----------------------------------------
+
+ dotnet dev-certs https
+
+ - task: DotNetCoreCLI@2
+ displayName: 'E2E ${{ parameters.version }} ${{parameters.appServiceType}}'
+ inputs:
+ command: test
+ arguments: '"$(Agent.TempDirectory)/E2ETests/**/*${{ parameters.version }}.Tests.E2E*.dll" --blame-hang-timeout 7m --filter "FullyQualifiedName~${{parameters.appServiceType}}&Category!=ExportLongRunning"'
+ workingDirectory: "$(System.ArtifactsDirectory)"
+ testRunTitle: '${{ parameters.version }} ${{parameters.appServiceType}}'
+ env:
+ 'TestEnvironmentUrl': $(TestEnvironmentUrl)
+ 'TestEnvironmentUrl_${{ parameters.version }}': $(TestEnvironmentUrl_${{ parameters.version }})
+ 'TestEnvironmentUrl_Sql': $(TestEnvironmentUrl_Sql)
+ 'TestEnvironmentUrl_${{ parameters.version }}_Sql': $(TestEnvironmentUrl_${{ parameters.version }}_Sql)
+ 'Resource': $(Resource)
+ 'AllStorageAccounts': $(AllStorageAccounts)
+ 'TestContainerRegistryServer': $(TestContainerRegistryServer)
+ 'TestContainerRegistryPassword': $(TestContainerRegistryPassword)
+ 'TestExportStoreUri': $(TestExportStoreUri)
+ 'TestExportStoreKey': $(TestExportStoreKey)
+ 'TestIntegrationStoreUri': $(TestIntegrationStoreUri)
+ 'TestIntegrationStoreKey': $(TestIntegrationStoreKey)
+ 'tenant-admin-service-principal-name': $(tenant-admin-service-principal-name)
+ 'tenant-admin-service-principal-password': $(tenant-admin-service-principal-password)
+ 'tenant-admin-user-name': $(tenant-admin-user-name)
+ 'tenant-admin-user-password': $(tenant-admin-user-password)
+ 'tenant-id': $(tenant-id)
+ 'app_globalAdminServicePrincipal_id': $(app_globalAdminServicePrincipal_id)
+ 'app_globalAdminServicePrincipal_secret': $(app_globalAdminServicePrincipal_secret)
+ 'app_nativeClient_id': $(app_nativeClient_id)
+ 'app_nativeClient_secret': $(app_nativeClient_secret)
+ 'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id)
+ 'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret)
+ 'user_globalAdminUser_id': $(user_globalAdminUser_id)
+ 'user_globalAdminUser_secret': $(user_globalAdminUser_secret)
+ 'user_globalConverterUser_id': $(user_globalConverterUser_id)
+ 'user_globalConverterUser_secret': $(user_globalConverterUser_secret)
+ 'user_globalExporterUser_id': $(user_globalExporterUser_id)
+ 'user_globalExporterUser_secret': $(user_globalExporterUser_secret)
+ 'user_globalImporterUser_id': $(user_globalImporterUser_id)
+ 'user_globalImporterUser_secret': $(user_globalImporterUser_secret)
+ 'user_globalReaderUser_id': $(user_globalReaderUser_id)
+ 'user_globalReaderUser_secret': $(user_globalReaderUser_secret)
+ 'user_globalWriterUser_id': $(user_globalWriterUser_id)
+ 'user_globalWriterUser_secret': $(user_globalWriterUser_secret)
diff --git a/.github/actions/package-integration-tests.yml b/.github/actions/package-integration-tests.yml
new file mode 100644
index 0000000000..180c259c07
--- /dev/null
+++ b/.github/actions/package-integration-tests.yml
@@ -0,0 +1,17 @@
+steps:
+
+ - task: DotNetCoreCLI@2
+ displayName: 'dotnet publish Integration Tests'
+ inputs:
+ command: publish
+ projects: 'test/**/*.csproj'
+ arguments: '--version-suffix $(build.buildNumber) -o "$(build.binariesdirectory)/IntegrationTests" --configuration $(buildConfiguration) --no-build -f $(defaultBuildFramework)'
+ publishWebProjects: false
+ zipAfterPublish: true
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish Integration Tests'
+ inputs:
+ pathToPublish: '$(build.binariesdirectory)/IntegrationTests'
+ artifactName: 'IntegrationTests'
+ artifactType: 'container'
\ No newline at end of file
diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml
new file mode 100644
index 0000000000..682417286f
--- /dev/null
+++ b/.github/actions/package-web-build-artifacts/action.yml
@@ -0,0 +1,27 @@
+name: Package Web Build Artifacts
+description: 'Packages the web build artifacts for deployment'
+inputs:
+ fhirSchemaVersion:
+ description: 'The FHIR schema version to package'
+ required: true
+ majorMinorPatch:
+ description: 'The version of the Nuget package'
+ required: true
+ outputPath:
+ description: 'The path to the output directory'
+ required: true
+ buildConfiguration:
+ description: 'The build configuration to use'
+ required: true
+ semVer:
+ description: 'The SemVer to use'
+ required: true
+runs:
+ using: 'composite'
+ steps:
+ - name: Publish Web Artifacts
+ shell: bash
+ run: |
+ echo "Publishing web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}"
+ dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}}
+ zip -r Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.zip ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web
diff --git a/.github/actions/package-web.yml b/.github/actions/package-web.yml
new file mode 100644
index 0000000000..edc1378081
--- /dev/null
+++ b/.github/actions/package-web.yml
@@ -0,0 +1,14 @@
+parameters:
+ csproj: '**/*Web.csproj'
+
+steps:
+
+ # Package web
+
+ - task: DotNetCoreCLI@2
+ displayName: 'dotnet publish ${{parameters.csproj}}'
+ inputs:
+ command: publish
+ projects: '${{parameters.csproj}}'
+ arguments: '--output $(build.artifactStagingDirectory)/web --configuration $(buildConfiguration) --version-suffix $(build.buildNumber) --no-build -f $(defaultBuildFramework)'
+ publishWebProjects: false
\ No newline at end of file
diff --git a/.github/actions/package.yml b/.github/actions/package.yml
new file mode 100644
index 0000000000..0fbf89aa9e
--- /dev/null
+++ b/.github/actions/package.yml
@@ -0,0 +1,93 @@
+steps:
+
+ # Package web
+ - template: package-web.yml
+ parameters:
+ csproj: '**/Microsoft.Health.Fhir.Stu3.Web.csproj'
+
+ - template: package-web.yml
+ parameters:
+ csproj: '**/Microsoft.Health.Fhir.R4.Web.csproj'
+
+ - template: package-web.yml
+ parameters:
+ csproj: '**/Microsoft.Health.Fhir.R4B.Web.csproj'
+
+ - template: package-web.yml
+ parameters:
+ csproj: '**/Microsoft.Health.Fhir.R5.Web.csproj'
+
+ # Package nugets
+ - powershell: |
+ & dotnet pack $(Build.SourcesDirectory) --output $(Build.ArtifactStagingDirectory)/nupkgs --no-build --configuration=Release -p:PackageVersion=$(nuGetVersion)
+ name: PackNugets
+
+ # Publish artifacts
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish web artifacts'
+ inputs:
+ pathToPublish: '$(build.artifactStagingDirectory)/web'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish samples'
+ inputs:
+ pathToPublish: './samples/'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish testauthenvironment.json'
+ inputs:
+ pathToPublish: './testauthenvironment.json'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish global.json'
+ inputs:
+ pathToPublish: './global.json'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish test configuration jsons'
+ inputs:
+ pathToPublish: './test/Configuration/'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish release directory'
+ inputs:
+ pathToPublish: './release/'
+ artifactName: 'deploy'
+ artifactType: 'container'
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish nuget artifacts'
+ inputs:
+ pathtoPublish: '$(build.artifactStagingDirectory)/nupkgs'
+ artifactName: 'nuget'
+ publishLocation: 'container'
+
+ - task: CopyFiles@2
+ displayName: 'copy symbols'
+ inputs:
+ sourceFolder: '$(build.sourcesDirectory)'
+ contents: |
+ **/*.pdb
+ !**/*.UnitTests.pdb
+ targetFolder: '$(build.artifactStagingDirectory)/symbols'
+ cleanTargetFolder: true
+ flattenFolders: true
+ overWrite: true
+
+ - task: PublishBuildArtifacts@1
+ displayName: 'publish symbol artifacts'
+ inputs:
+ pathtoPublish: '$(build.artifactStagingDirectory)/symbols'
+ artifactName: 'symbols'
+ publishLocation: 'container'
+
\ No newline at end of file
diff --git a/.github/actions/provision-healthcheck.yml b/.github/actions/provision-healthcheck.yml
new file mode 100644
index 0000000000..40107e6ebf
--- /dev/null
+++ b/.github/actions/provision-healthcheck.yml
@@ -0,0 +1,26 @@
+parameters:
+- name: webAppName
+ type: string
+
+steps:
+- powershell: |
+ $webAppName = "${{ parameters.webAppName }}".ToLower()
+ $healthCheckUrl = "https://$webAppName.azurewebsites.net/health/check"
+ $healthStatus = 0
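+ # Poll the health endpoint every 5 seconds until it returns HTTP 200.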
+ Do {
+ Start-Sleep -s 5
+ Write-Host "Checking: $healthCheckUrl"
+
+ try {
+ $healthStatus = (Invoke-WebRequest -URI $healthCheckUrl).statuscode
+ Write-Host "Result: $healthStatus"
+ }
+ catch {
+ Write-Host $PSItem.Exception.Message
+ }
+ finally {
+ $Error.Clear()
+ }
+
+ } While ($healthStatus -ne 200)
+ name: PingHealthCheckEndpoint
diff --git a/.github/actions/provision-sqlServer.yml b/.github/actions/provision-sqlServer.yml
new file mode 100644
index 0000000000..190add675e
--- /dev/null
+++ b/.github/actions/provision-sqlServer.yml
@@ -0,0 +1,44 @@
+
+parameters:
+- name: resourceGroup
+ type: string
+- name: sqlServerName
+ type: string
+- name: schemaAutomaticUpdatesEnabled
+ type: string
+ default: 'auto'
+- name: sqlServerAdminPassword
+ type: string
+ default: ''
+
+jobs:
+- job: provisionEnvironment
+ pool:
+ name: '$(SharedLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - task: AzureKeyVault@1
+ displayName: 'Azure Key Vault: resolute-oss-tenant-info'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ KeyVaultName: 'resolute-oss-tenant-info'
+
+ - task: AzurePowerShell@5
+ displayName: 'Azure PowerShell script: InlineScript'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ azurePowerShellVersion: latestVersion
+ ScriptType: inlineScript
+ Inline: |
+ Add-Type -AssemblyName System.Web
+
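+ # Parameters passed to the default-sqlServer ARM template deployment below.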
+ $templateParameters = @{
+ sqlAdminPassword = "${{parameters.sqlServerAdminPassword}}"
+ sqlServerName = "${{parameters.sqlServerName}}".ToLower()
+ sqlSchemaAutomaticUpdatesEnabled = "${{parameters.schemaAutomaticUpdatesEnabled}}"
+ }
+
+ Write-Host "Provisioning Sql server"
+ Write-Host "Resource Group: ${{ parameters.resourceGroup }}"
+ Write-Host "SqlServerName: ${{ parameters.sqlServerName }}"
+ New-AzResourceGroupDeployment -ResourceGroupName "${{ parameters.resourceGroup }}" -TemplateFile $(System.DefaultWorkingDirectory)/samples/templates/default-sqlServer.json -TemplateParameterObject $templateParameters -Verbose
diff --git a/.github/actions/redeploy-webapp.yml b/.github/actions/redeploy-webapp.yml
new file mode 100644
index 0000000000..6845260147
--- /dev/null
+++ b/.github/actions/redeploy-webapp.yml
@@ -0,0 +1,29 @@
+parameters:
+- name: version
+ type: string
+- name: webAppName
+ type: string
+- name: subscription
+ type: string
+- name: imageTag
+ type: string
+
+jobs:
+- job: provisionEnvironment
+ pool:
+ name: '$(DefaultLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - task: AzureRmWebAppDeployment@4
+ displayName: 'Azure App Service Deploy'
+ inputs:
+ azureSubscription: '${{ parameters.subscription }}'
+ appType: 'webAppContainer'
+ WebAppName: '${{ parameters.webAppName }}'
+ DockerNamespace: $(azureContainerRegistry)
+ DockerRepository: '${{ parameters.version }}_fhir-server'
+ DockerImageTag: ${{ parameters.imageTag }}
+
+ - template: ./provision-healthcheck.yml
+ parameters:
+ webAppName: ${{ parameters.webAppName }}
\ No newline at end of file
diff --git a/.github/actions/run-tests.yml b/.github/actions/run-tests.yml
new file mode 100644
index 0000000000..d8672f3c1c
--- /dev/null
+++ b/.github/actions/run-tests.yml
@@ -0,0 +1,79 @@
+parameters:
+- name: version
+ type: string
+- name: keyVaultName
+ type: string
+- name: appServiceName
+ type: string
+jobs:
+- job: "integrationTests"
+ pool:
+ name: '$(SharedLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - task: DownloadBuildArtifacts@0
+ inputs:
+ buildType: 'current'
+ downloadType: 'single'
+ downloadPath: '$(System.ArtifactsDirectory)'
+ artifactName: 'IntegrationTests'
+
+ - task: ExtractFiles@1
+ displayName: 'Extract Integration Test Binaries'
+ inputs:
+ archiveFilePatterns: '$(System.ArtifactsDirectory)/IntegrationTests/Microsoft.Health.Fhir.${{ parameters.version }}.Tests.Integration.zip'
+ destinationFolder: '$(Agent.TempDirectory)/IntegrationTests/'
+
+ - task: UseDotNet@2
+ inputs:
+ useGlobalJson: true
+
+ - task: AzureKeyVault@1
+ displayName: 'Azure Key Vault: ${{ parameters.keyVaultName }}'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ KeyVaultName: '${{ parameters.keyVaultName }}'
+
+ - task: AzureKeyVault@1
+ displayName: 'Azure Key Vault: ${{ parameters.keyVaultName }}-sql'
+ inputs:
+ azureSubscription: $(ConnectedServiceName)
+ KeyVaultName: '${{ parameters.keyVaultName }}-sql'
+
+ - task: DotNetCoreCLI@2
+ displayName: 'Run Integration Tests'
+ inputs:
+ command: test
+ arguments: '"$(Agent.TempDirectory)/IntegrationTests/**/*${{ parameters.version }}.Tests.Integration*.dll" --blame-hang-timeout 15m'
+ workingDirectory: "$(System.ArtifactsDirectory)"
+ testRunTitle: '${{ parameters.version }} Integration'
+ env:
+ 'CosmosDb:Host': $(CosmosDb--Host)
+ 'CosmosDb:Key': $(CosmosDb--Key)
+ 'SqlServer:ConnectionString': $(SqlServer--ConnectionString)
+
+- job: 'cosmosE2eTests'
+ dependsOn: []
+ pool:
+ name: '$(SharedLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - template: e2e-setup.yml
+ - template: e2e-tests.yml
+ parameters:
+ version: ${{ parameters.version }}
+ appServiceName: ${{ parameters.appServiceName }}
+ appServiceType: 'CosmosDb'
+
+- job: 'sqlE2eTests'
+ dependsOn: []
+ pool:
+ name: '$(SharedLinuxPool)'
+ vmImage: '$(LinuxVmImage)'
+ steps:
+ - template: e2e-setup.yml
+ - template: e2e-tests.yml
+ parameters:
+ version: ${{ parameters.version }}
+ appServiceName: '${{ parameters.appServiceName }}-sql'
+ appServiceType: 'SqlServer'
diff --git a/.github/actions/setup-build-variables/action.yml b/.github/actions/setup-build-variables/action.yml
new file mode 100644
index 0000000000..b78873f84b
--- /dev/null
+++ b/.github/actions/setup-build-variables/action.yml
@@ -0,0 +1,38 @@
+name: setup build variables
+description: Sets variables used during builds.
+
+runs:
+ using: composite
+ steps:
+ - name: Set Build Variables
+ id: defaultVariables
+ shell: bash
+ run: |
+ echo "buildConfiguration=Release" >> "$GITHUB_ENV"
+ echo "defaultBuildFramework=net8.0" >> "$GITHUB_ENV"
+ echo "azureSubscriptionEndpoint=docker-build" >> "$GITHUB_ENV"
+ echo "azureContainerRegistryName=healthplatformregistry" >> "$GITHUB_ENV"
+ echo "connectedServiceName=Microsoft Health Open Source Subscription" >> "$GITHUB_ENV"
+ echo "composeLocation=build/docker/docker-compose.yaml" >> "$GITHUB_ENV"
+
+ - name: Set Build Urls using Deployment Environment
+ shell: bash
+ run: |
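+ # Derive the per-version deployment names and test URLs from the base deployment environment name.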
+ echo "azureContainerRegistry='$azureContainerRegistryName'.azurecr.io" >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameSql='$deploymentEnvironmentName-sql' >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameR4='$deploymentEnvironmentName-r4' >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameR4Sql='$deploymentEnvironmentNameR4'-sql >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameR4B='$deploymentEnvironmentName-r4b' >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameR4BSql='$deploymentEnvironmentNameR4B'-sql >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameR5='$deploymentEnvironmentName'-r5 >> "$GITHUB_ENV"
+ echo "deploymentEnvironmentNameR5Sql='$deploymentEnvironmentNameR5'-sql >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl=https://'$deploymentEnvironmentName'.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_Sql=https://'$deploymentEnvironmentName'-sql.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_R4=https://'$deploymentEnvironmentName'-r4.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_R4_Sql=https://'$deploymentEnvironmentName'-r4-sql.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_R4B=https://'$deploymentEnvironmentName'-r4b.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_R4B_Sql=https://'$deploymentEnvironmentName'-r4b-sql.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_R5=https://'$deploymentEnvironmentName'-r5.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testEnvironmentUrl_R5_Sql=https://'$deploymentEnvironmentName'-r5-sql.azurewebsites.net >> "$GITHUB_ENV"
+ echo "testClientUrl=https://'$deploymentEnvironmentName'-client/ >> "$GITHUB_ENV"
+ echo "testApplicationResource=https://'$deploymentEnvironmentName'.'$tenantDomain' >> "$GITHUB_ENV"
diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml
new file mode 100644
index 0000000000..ccaa23c812
--- /dev/null
+++ b/.github/actions/update-semver/action.yml
@@ -0,0 +1,39 @@
+name: update-semver
+description: 'Update the build number with the SemVer from GitVersion'
+inputs:
+ configFilePath:
+ description: 'Path to the GitVersion configuration file'
+ required: false
+ default: './GitVersion.yml'
+outputs:
+ assemblyVersion:
+ description: The assembly version for the build
+ value: ${{ steps.version.outputs.GitVersion_AssemblySemVer }}
+ fileVersion:
+ description: The assembly file version for the build
+ value: ${{ steps.version.outputs.GitVersion_AssemblySemFileVer }}
+ informationalVersion:
+ description: The assembly informational version for the build
+ value: ${{ steps.version.outputs.GitVersion_InformationalVersion }}
+ semVer:
+ description: The NuGet package version for the build
+ value: ${{ steps.version.outputs.GitVersion_SemVer }}
+ majorMinorPatch:
+ description: The major.minor.patch version for the build
+ value: ${{ steps.version.outputs.GitVersion_MajorMinorPatch }}
+runs:
+ using: 'composite'
+ steps:
+
+ - name: Install GitVersion
+ uses: gittools/actions/gitversion/setup@v0.13.4
+ with:
+ versionSpec: '5.x'
+
+ - name: SetVariablesFromGitVersion
+ id: version
+ uses: gittools/actions/gitversion/execute@v0.13.4
+ with:
+ configFilePath: ${{inputs.configFilePath}}
+ targetPath: ${{github.workspace}}
+ useConfigFile: true
diff --git a/.github/actions/update-sqlAdminPassword.yml b/.github/actions/update-sqlAdminPassword.yml
new file mode 100644
index 0000000000..b1d4a2466e
--- /dev/null
+++ b/.github/actions/update-sqlAdminPassword.yml
@@ -0,0 +1,18 @@
+steps:
+
+- task: UseDotNet@2
+ displayName: 'Use .NET Core sdk (to generate password)'
+ inputs:
+ packageType: sdk
+ version: 3.1.x
+
+- task: UseDotNet@2
+ inputs:
+ useGlobalJson: true
+
+- powershell: |
+
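+ # Generate a 24-character password guaranteed to contain at least one symbol, digit, uppercase, and lowercase character.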
+ $random = -join((((33,35,37,38,42,43,45,46,95) + (48..57) + (65..90) + (97..122) | Get-Random -Count 20) + ((33,35,37,38,42,43,45,46,95) | Get-Random -Count 1) + ((48..57) | Get-Random -Count 1) + ((65..90) | Get-Random -Count 1) + ((97..122) | Get-Random -Count 1) | Get-Random -Count 24) | % {[char]$_})
+ Write-Host "##vso[task.setvariable variable=password;isOutput=true]"
+
+ name: SetVariablesFromRandomString
diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml
new file mode 100644
index 0000000000..e3117043a2
--- /dev/null
+++ b/.github/workflows/fhir-oss-ci-pipeline.yml
@@ -0,0 +1,258 @@
+# DESCRIPTION:
+# Builds, tests, and packages the solution for pull request validation.
+
+on:
+ pull_request
+
+permissions:
+ id-token: write
+ contents: read
+
+defaults:
+ run:
+ working-directory: src
+ shell: bash
+
+env:
+ buildConfiguration: Release
+ azureSubscriptionEndpoint: docker-build
+ azureContainerRegistryName: healthplatformregistry
+ connectedServiceName: Microsoft Health Open Source Subscription
+ composeLocation: build/docker/docker-compose.yaml
+ imageTag: ${{github.run_number}}
+ outputPath: ${{github.workspace}}/artifacts
+ defaultDotNetVersion: net8.0
+
+jobs:
+ setup:
+ runs-on: [self-hosted, 1ES.Pool=GithubRunPool]
+ env:
+ deploymentEnvironmentName: ${{ vars.CIRESOURCEGROUPROOT }}
+ appServicePlanName: ${{ vars.CIRESOURCEGROUPROOT }}-linux
+ resourceGroupName: ${{ vars.CIRESOURCEGROUPROOT }}
+ outputs:
+ assemblyVersion: ${{ steps.version.outputs.assemblyVersion }}
+ fileVersion: ${{ steps.version.outputs.fileVersion }}
+ informationalVersion: ${{ steps.version.outputs.informationalVersion }}
+ majorMinorPatch: ${{ steps.version.outputs.majorMinorPatch }}
+ semVer: ${{steps.version.outputs.SemVer}}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Install Latest .Net SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ global-json-file: 'global.json'
+ dotnet-version: |
+ 6.x
+ 8.x
+
+ - name: Determine Semver
+ id: version
+ uses: ./.github/actions/update-semver
+
+ # - name: Azure Login
+ # uses: azure/login@v2
+ # with:
+ # client-id: ${{secrets.AZURE_CLIENT_ID}}
+ # subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}}
+ # tenant-id: ${{secrets.AZURE_TENANT_ID}}
+ # enable-AzPSSession: true
+
+ # - name: Clean Storage Accounts
+ # uses: ./.github/actions/clean-storage-accounts
+ # with:
+ # environmentName: ${{vars.CIRESOURCEGROUPROOT}}
+ # - name: Cleanup Integration Test databases
+ # uses: ./.github/actions/cleanup-integration-test-databases
+ # with:
+ # environmentName: ${{vars.CIRESOURCEGROUPROOT}}
+ buildAndUnitTest:
+ runs-on: windows-latest
+ needs: setup
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Check DotNet Version
+ run: dotnet --version
+
+ - name: Build
+ uses: ./.github/actions/dotnet-build
+ with:
+ assemblyVersion: ${{needs.setup.outputs.assemblyVersion}}
+ buildConfiguration: ${{env.buildConfiguration}}
+ fileVersion: ${{needs.setup.outputs.fileVersion}}
+ informationalVersion: ${{needs.setup.outputs.informationalVersion}}
+ majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}}
+ - name: Test
+ uses: ./.github/actions/dotnet-test
+ with:
+ buildConfiguration: ${{env.buildConfiguration}}
+
+ # - name: Generate SBOM
+ # run: |
+ # curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64
+ # chmod +x $RUNNER_TEMP/sbom-tool
+ # $RUNNER_TEMP/sbom-tool generate -b ${{env.outputPath}} -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}}
+
+ # - name: Upload a Build Artifact
+ # uses: actions/upload-artifact@v4
+ # with:
+ # name: build
+ # path: ${{env.outputPath}}
+ - name: Create Nuget packages
+ shell: bash
+ run: |
+ echo "Creating Nuget packages"
+ dotnet pack ${{github.workspace}}/Microsoft.Health.Fhir.sln --output ${{env.outputPath}}/nupkgs --no-build --configuration=${{env.buildConfiguration}} -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}}
+
+ - name: Upload Nuget Packages
+ uses: actions/upload-artifact@v4
+ with:
+ name: nuget
+ path: ${{env.outputPath}}/nupkgs
+
+ - name: Copying samples to deploy directory
+ shell: bash
+ run: |
+ echo "Copying samples to deploy directory"
+ cp -r ${{github.workspace}}/samples ${{env.outputPath}}/deploy
+
+ - name: Copying testauthenvironment.json to deploy directory
+ shell: bash
+ run: |
+ echo "Copying testauthenvironment.json to deploy directory"
+ cp ${{github.workspace}}/testauthenvironment.json ${{env.outputPath}}/deploy/
+
+ - name: Copying global.json to deploy directory
+ shell: bash
+ run: |
+ echo "Copying global.json to deploy directory"
+ cp ${{github.workspace}}/global.json ${{env.outputPath}}/deploy/
+
+ - name: Copying test configuration json to deploy directory
+ shell: bash
+ run: |
+ echo "Copying test configuration json to deploy directory"
+ cp ${{github.workspace}}/test/Configuration/testconfiguration.json ${{env.outputPath}}/deploy/
+
+ - name: Copying docker compose root file to deploy directory
+ shell: bash
+ run: |
+ echo "Copying docker compose root file to deploy directory"
+ cp ${{github.workspace}}/release/docker-compose.yaml ${{env.outputPath}}/deploy
+
+ - name: Copying pdb files to symbols directory
+ shell: bash
+ run: |
+ echo "Copying pdb files to deploy symbols"
+ find ${{github.workspace}}/src -type f -name "*.pdb" ! -name "*UnitTest*" -exec cp {} ${{env.outputPath}}/symbols \;
+
+ - name: Publish Stu3 Web Artifacts to deploy directory
+ uses: ./.github/actions/package-web-build-artifacts
+ with:
+ fhirschemaversion: "Stu3"
+ majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}}
+ outputPath: ${{env.outputPath}}
+ buildConfiguration: ${{env.buildConfiguration}}
+ semVer: ${{needs.setup.outputs.semVer}}
+
+ - name: Publish R4 Web Artifacts to deploy directory
+ uses: ./.github/actions/package-web-build-artifacts
+ with:
+ fhirschemaversion: "R4"
+ majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}}
+ outputPath: ${{env.outputPath}}
+ buildConfiguration: ${{env.buildConfiguration}}
+ semVer: ${{needs.setup.outputs.semVer}}
+
+ - name: Publish R4B Web Artifacts to deploy directory
+ uses: ./.github/actions/package-web-build-artifacts
+ with:
+ fhirschemaversion: "R4B"
+ majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}}
+ outputPath: ${{env.outputPath}}
+ buildConfiguration: ${{env.buildConfiguration}}
+ semVer: ${{needs.setup.outputs.semVer}}
+
+ - name: Publish R5 Web Artifacts to deploy directory
+ uses: ./.github/actions/package-web-build-artifacts
+ with:
+ fhirschemaversion: "R5"
+ majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}}
+ outputPath: ${{env.outputPath}}
+ buildConfiguration: ${{env.buildConfiguration}}
+ semVer: ${{needs.setup.outputs.semVer}}
+
+ - name: Docker Build Stu3 Image
+ uses: ./.github/actions/docker-build
+ with:
+ assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}}
+ fhirSchemaVersion: "Stu3"
+ composeLocation: ${{env.composeLocation}}
+
+ - name: Docker Build R4 Image
+ uses: ./.github/actions/docker-build
+ with:
+ assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}}
+ fhirSchemaVersion: "R4"
+ composeLocation: ${{env.composeLocation}}
+
+ - name: Docker Build R4B Image
+ uses: ./.github/actions/docker-build
+ with:
+ assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}}
+ fhirSchemaVersion: "R4B"
+ composeLocation: ${{env.composeLocation}}
+
+ - name: Docker Build R5 Image
+ uses: ./.github/actions/docker-build
+ with:
+ assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}}
+ fhirSchemaVersion: "R5"
+ composeLocation: ${{env.composeLocation}}
+
+ - name: Upload deploy directory
+ uses: actions/upload-artifact@v4
+ with:
+ name: deploy
+ path: ${{env.outputPath}}/deploy
+
+ # - name: Upload Symbols
+ # uses: actions/upload-artifact@v4
+ # with:
+ # name: symbols
+ # path: ${{env.outputPath}}/bin/${{env.buildConfiguration}}/net5.0/publish
+
+ # runIntegrationTests:
+ # runs-on: [self-hosted, 1ES.Pool=GithubRunPool, Windows]
+ # needs : buildAndUnitTest
+ # steps:
+ # - name: Checkout
+ # uses: actions/checkout@v4
+ # with:
+ # fetch-depth: 0
+ # - name: Download Build Artifact for Testing
+ # uses: actions/download-artifact@v4
+ # with:
+ # path: artifacts
+ # - name: Install Latest .Net SDK
+ # uses: actions/setup-dotnet@v4
+ # with:
+ # global-json-file: 'global.json'
+ # dotnet-version: |
+ # 6.x
+ # 8.x
+ # - name: Docker add main tag
+ # uses: ./.github/actions/docker-add-main-tag
+ # with:
+ # assemblySemFileVer: ${{needs.setup.outputs.semVer}}
+ # imageTag: ${{env.imageTag}}
+ # azureContainerRegistryName: ${{env.azureContainerRegistryName}}
+ # connectedServiceName: ${{env.connectedServiceName}}
diff --git a/CustomAnalysisRules.ruleset b/CustomAnalysisRules.ruleset
index 342d390ef1..72d55b1330 100644
--- a/CustomAnalysisRules.ruleset
+++ b/CustomAnalysisRules.ruleset
@@ -44,6 +44,7 @@
+
diff --git a/Directory.Build.props b/Directory.Build.props
index 174c7a2c62..798a01206c 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -6,7 +6,8 @@
Microsoft Health Team
Microsoft Corporation
Copyright © Microsoft Corporation. All rights reserved.
- Portable
+ true
+ embedded
true
true
true
@@ -16,7 +17,7 @@
MIT
Microsoft FHIR Server for Azure
true
- https://github.com/microsoft/fhir-server/
+ https://github.com/microsoft/fhir-server
$(MSBuildThisFileDirectory)\CodeCoverage.runsettings
net8.0;net6.0
true
@@ -57,6 +58,7 @@
+
diff --git a/Directory.Packages.props b/Directory.Packages.props
index 116096cd1a..e66667d6e0 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -32,10 +32,11 @@
-
+
-
-
+
+
+
@@ -67,15 +68,15 @@
-
+
-
+
-
+
@@ -94,16 +95,16 @@
-
+
-
+
-
+
@@ -116,7 +117,7 @@
-
+
diff --git a/build/docker/Dockerfile b/build/docker/Dockerfile
index 364f2a57a0..5e061fcc95 100644
--- a/build/docker/Dockerfile
+++ b/build/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/dotnet/sdk:8.0.202-cbl-mariner2.0 AS build
+FROM mcr.microsoft.com/dotnet/sdk:8.0.204-cbl-mariner2.0 AS build
ARG FHIR_VERSION
ARG ASSEMBLY_VER
@@ -69,7 +69,7 @@ COPY . .
RUN dotnet publish /repo/src/Microsoft.Health.Fhir.${FHIR_VERSION}.Web/Microsoft.Health.Fhir.${FHIR_VERSION}.Web.csproj -c Release -o "/build" --no-restore -p:AssemblyVersion="${ASSEMBLY_VER}" -p:FileVersion="${ASSEMBLY_VER}" -p:Version="${ASSEMBLY_VER}" -f net8.0
-FROM mcr.microsoft.com/dotnet/aspnet:8.0.3-cbl-mariner2.0 AS runtime
+FROM mcr.microsoft.com/dotnet/aspnet:8.0.4-cbl-mariner2.0 AS runtime
ARG FHIR_VERSION
diff --git a/build/jobs/e2e-tests.yml b/build/jobs/e2e-tests.yml
index b0fdfed9f1..89a651b9a4 100644
--- a/build/jobs/e2e-tests.yml
+++ b/build/jobs/e2e-tests.yml
@@ -125,15 +125,15 @@ steps:
'app_nativeClient_secret': $(app_nativeClient_secret)
'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id)
'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret)
- 'app_globalAdminUser_id': $(app_globalAdminUser_id)
- 'app_globalAdminUser_secret': $(app_globalAdminUser_secret)
- 'app_globalConverterUser_id': $(app_globalConverterUser_id)
- 'app_globalConverterUser_secret': $(app_globalConverterUser_secret)
- 'app_globalExporterUser_id': $(app_globalExporterUser_id)
- 'app_globalExporterUser_secret': $(app_globalExporterUser_secret)
- 'app_globalImporterUser_id': $(app_globalImporterUser_id)
- 'app_globalImporterUser_secret': $(app_globalImporterUser_secret)
- 'app_globalReaderUser_id': $(app_globalReaderUser_id)
- 'app_globalReaderUser_secret': $(app_globalReaderUser_secret)
- 'app_globalWriterUser_id': $(app_globalWriterUser_id)
- 'app_globalWriterUser_secret': $(app_globalWriterUser_secret)
+ 'app_globalAdminUserApp_id': $(app_globalAdminUserApp_id)
+ 'app_globalAdminUserApp_secret': $(app_globalAdminUserApp_secret)
+ 'app_globalConverterUserApp_id': $(app_globalConverterUserApp_id)
+ 'app_globalConverterUserApp_secret': $(app_globalConverterUserApp_secret)
+ 'app_globalExporterUserApp_id': $(app_globalExporterUserApp_id)
+ 'app_globalExporterUserApp_secret': $(app_globalExporterUserApp_secret)
+ 'app_globalImporterUserApp_id': $(app_globalImporterUserApp_id)
+ 'app_globalImporterUserApp_secret': $(app_globalImporterUserApp_secret)
+ 'app_globalReaderUserApp_id': $(app_globalReaderUserApp_id)
+ 'app_globalReaderUserApp_secret': $(app_globalReaderUserApp_secret)
+ 'app_globalWriterUserApp_id': $(app_globalWriterUserApp_id)
+ 'app_globalWriterUserApp_secret': $(app_globalWriterUserApp_secret)
diff --git a/build/jobs/package.yml b/build/jobs/package.yml
index 0fbf89aa9e..d66ba4577a 100644
--- a/build/jobs/package.yml
+++ b/build/jobs/package.yml
@@ -70,24 +70,4 @@ steps:
inputs:
pathtoPublish: '$(build.artifactStagingDirectory)/nupkgs'
artifactName: 'nuget'
- publishLocation: 'container'
-
- - task: CopyFiles@2
- displayName: 'copy symbols'
- inputs:
- sourceFolder: '$(build.sourcesDirectory)'
- contents: |
- **/*.pdb
- !**/*.UnitTests.pdb
- targetFolder: '$(build.artifactStagingDirectory)/symbols'
- cleanTargetFolder: true
- flattenFolders: true
- overWrite: true
-
- - task: PublishBuildArtifacts@1
- displayName: 'publish symbol artifacts'
- inputs:
- pathtoPublish: '$(build.artifactStagingDirectory)/symbols'
- artifactName: 'symbols'
- publishLocation: 'container'
-
\ No newline at end of file
+ publishLocation: 'container'
\ No newline at end of file
diff --git a/build/jobs/run-export-tests.yml b/build/jobs/run-export-tests.yml
index cd6bbadff9..871aa4ef0a 100644
--- a/build/jobs/run-export-tests.yml
+++ b/build/jobs/run-export-tests.yml
@@ -95,18 +95,18 @@ jobs:
'app_nativeClient_secret': $(app_nativeClient_secret)
'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id)
'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret)
- 'app_globalAdminUser_id': $(app_globalAdminUser_id)
- 'app_globalAdminUser_secret': $(app_globalAdminUser_secret)
- 'app_globalConverterUser_id': $(app_globalConverterUser_id)
- 'app_globalConverterUser_secret': $(app_globalConverterUser_secret)
- 'app_globalExporterUser_id': $(app_globalExporterUser_id)
- 'app_globalExporterUser_secret': $(app_globalExporterUser_secret)
- 'app_globalImporterUser_id': $(app_globalImporterUser_id)
- 'app_globalImporterUser_secret': $(app_globalImporterUser_secret)
- 'app_globalReaderUser_id': $(app_globalReaderUser_id)
- 'app_globalReaderUser_secret': $(app_globalReaderUser_secret)
- 'app_globalWriterUser_id': $(app_globalWriterUser_id)
- 'app_globalWriterUser_secret': $(app_globalWriterUser_secret)
+ 'app_globalAdminUserApp_id': $(app_globalAdminUserApp_id)
+ 'app_globalAdminUserApp_secret': $(app_globalAdminUserApp_secret)
+ 'app_globalConverterUserApp_id': $(app_globalConverterUserApp_id)
+ 'app_globalConverterUserApp_secret': $(app_globalConverterUserApp_secret)
+ 'app_globalExporterUserApp_id': $(app_globalExporterUserApp_id)
+ 'app_globalExporterUserApp_secret': $(app_globalExporterUserApp_secret)
+ 'app_globalImporterUserApp_id': $(app_globalImporterUserApp_id)
+ 'app_globalImporterUserApp_secret': $(app_globalImporterUserApp_secret)
+ 'app_globalReaderUserApp_id': $(app_globalReaderUserApp_id)
+ 'app_globalReaderUserApp_secret': $(app_globalReaderUserApp_secret)
+ 'app_globalWriterUserApp_id': $(app_globalWriterUserApp_id)
+ 'app_globalWriterUserApp_secret': $(app_globalWriterUserApp_secret)
- job: 'sqlE2eTests'
dependsOn: []
@@ -198,16 +198,16 @@ jobs:
'app_nativeClient_secret': $(app_nativeClient_secret)
'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id)
'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret)
- 'app_globalAdminUser_id': $(app_globalAdminUser_id)
- 'app_globalAdminUser_secret': $(app_globalAdminUser_secret)
- 'app_globalConverterUser_id': $(app_globalConverterUser_id)
- 'app_globalConverterUser_secret': $(app_globalConverterUser_secret)
- 'app_globalExporterUser_id': $(app_globalExporterUser_id)
- 'app_globalExporterUser_secret': $(app_globalExporterUser_secret)
- 'app_globalImporterUser_id': $(app_globalImporterUser_id)
- 'app_globalImporterUser_secret': $(app_globalImporterUser_secret)
- 'app_globalReaderUser_id': $(app_globalReaderUser_id)
- 'app_globalReaderUser_secret': $(app_globalReaderUser_secret)
- 'app_globalWriterUser_id': $(app_globalWriterUser_id)
- 'app_globalWriterUser_secret': $(app_globalWriterUser_secret)
+ 'app_globalAdminUserApp_id': $(app_globalAdminUserApp_id)
+ 'app_globalAdminUserApp_secret': $(app_globalAdminUserApp_secret)
+ 'app_globalConverterUserApp_id': $(app_globalConverterUserApp_id)
+ 'app_globalConverterUserApp_secret': $(app_globalConverterUserApp_secret)
+ 'app_globalExporterUserApp_id': $(app_globalExporterUserApp_id)
+ 'app_globalExporterUserApp_secret': $(app_globalExporterUserApp_secret)
+ 'app_globalImporterUserApp_id': $(app_globalImporterUserApp_id)
+ 'app_globalImporterUserApp_secret': $(app_globalImporterUserApp_secret)
+ 'app_globalReaderUserApp_id': $(app_globalReaderUserApp_id)
+ 'app_globalReaderUserApp_secret': $(app_globalReaderUserApp_secret)
+ 'app_globalWriterUserApp_id': $(app_globalWriterUserApp_id)
+ 'app_globalWriterUserApp_secret': $(app_globalWriterUserApp_secret)
diff --git a/docs/BulkExport.md b/docs/BulkExport.md
index 84f7f9dbe9..2761bd5598 100644
--- a/docs/BulkExport.md
+++ b/docs/BulkExport.md
@@ -40,10 +40,12 @@ https://test-fhir-server/$export
For more details on Bulk Export, see the [Azure API for FHIR Export Data page](https://docs.microsoft.com/en-us/azure/healthcare-apis/fhir/export-data).
-In addition to the query parameters specified in the Azure API For FHIR documentation, users can also use the \_format in FHIR Server. \_format allows a user to select a format for the file structure that the export job creates. Different formats can be defined in the appSettings by combining constants, folder level breaks ('/'), and known tags. The tags will be replaced with data when the job is run. The three supported tags are:
+Below is a set of additional query parameters that users can specify, beyond the ones defined in the Azure API for FHIR documentation:
+1. \_format: In FHIR Server, \_format allows a user to select a format for the file structure that the export job creates. Different formats can be defined in the appSettings by combining constants, folder-level breaks ('/'), and known tags. The tags are replaced with data when the job is run. The three supported tags are:
* **resourcename**: replaces with the resource type being exported
* **timestamp**: replaces with a timestamp of the job's queried time
* **id**: replaces with the GUID of the export job
+1. \_max_count: \_max_count allows users to reduce the number of resources exported by a single job (see the illustrative request below). Users can pass the _maxCount=xxxx query parameter or set MaximumNumberOfResourcesPerQuery in the export configuration section; the default is 10,000. The export operation needs memory to serialize the data while writing it to the lake, so to reduce out-of-memory exceptions users can lower the _max_count value in decrements of 1,000. Increasing the compute memory on the FHIR server is also beneficial.
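As a quick illustration of how the \_maxCount query parameter might be passed on the kick-off request, here is a minimal C# sketch. The base URL reuses the test-fhir-server placeholder from this document; the client wiring, the value 5000, and the header handling are assumptions for illustration, not part of this PR.

```csharp
// Illustrative only: kick off a bulk export with a reduced per-query resource count.
using System;
using System.Net.Http;
using System.Threading.Tasks;

public static class ExportKickOffExample
{
    public static async Task RunAsync()
    {
        using var client = new HttpClient { BaseAddress = new Uri("https://test-fhir-server/") };

        // Bulk export kick-off uses the async request pattern; _maxCount is the query parameter described above.
        var request = new HttpRequestMessage(HttpMethod.Get, "$export?_maxCount=5000");
        request.Headers.Add("Accept", "application/fhir+json");
        request.Headers.Add("Prefer", "respond-async");

        HttpResponseMessage response = await client.SendAsync(request);

        // A 202 Accepted response carries a Content-Location header pointing to the job status endpoint.
        Console.WriteLine((int)response.StatusCode);
        Console.WriteLine(response.Content.Headers.ContentLocation);
    }
}
```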
To use the format, you will need to set the following settings in the appSettings:
diff --git a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs
index b287622c85..669a490d3d 100644
--- a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs
+++ b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs
@@ -267,6 +267,15 @@ public static string ImportModeIsNotRecognized {
}
}
+ ///
+ /// Looks up a localized string similar to Import request cannot have duplicate files. Found duplicates: {0}. .
+ ///
+ public static string ImportRequestDuplicateInputFiles {
+ get {
+ return ResourceManager.GetString("ImportRequestDuplicateInputFiles", resourceCulture);
+ }
+ }
+
///
/// Looks up a localized string similar to Import request must be specified as a Paramters. The body provided in this request is not valid. .
///
@@ -564,6 +573,15 @@ public static string OperationNotImplemented {
}
}
+ ///
+ /// Looks up a localized string similar to The requested operation is not supported..
+ ///
+ public static string OperationNotSupported {
+ get {
+ return ResourceManager.GetString("OperationNotSupported", resourceCulture);
+ }
+ }
+
///
/// Looks up a localized string similar to FHIR Server.
///
diff --git a/src/Microsoft.Health.Fhir.Api/Resources.resx b/src/Microsoft.Health.Fhir.Api/Resources.resx
index c53c8f73d4..16838bf404 100644
--- a/src/Microsoft.Health.Fhir.Api/Resources.resx
+++ b/src/Microsoft.Health.Fhir.Api/Resources.resx
@@ -182,6 +182,9 @@
The requested "{0}" operation is not supported.
{0} is the operation name
+
+ The requested operation is not supported.
+
FHIR Server
{NumberedPlaceHolder="FHIR"}
@@ -320,6 +323,9 @@
Import request must be specified as a Paramters. The body provided in this request is not valid.
+
+ Import request cannot have duplicate files. Found duplicates: {0}.
+
The template collection reference '{0}' is invalid.
{0}: template collection reference
diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs
index b856c4cf40..4daac4101a 100644
--- a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs
+++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs
@@ -56,7 +56,7 @@ public async Task GivenAnExportJob_WhenItIsCancelled_ThenAnExceptionIsThrown()
var expectedResults = GenerateJobRecord(OperationStatus.Canceled);
var processingJob = new ExportProcessingJob(new Func(MakeMockJob), new TestQueueClient(), new NullLogger());
- await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None));
+ await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None));
}
[Theory]
@@ -67,7 +67,7 @@ public async Task GivenAnExportJob_WhenItFinishesInANonTerminalState_ThenAnExcep
var expectedResults = GenerateJobRecord(status);
var processingJob = new ExportProcessingJob(new Func(MakeMockJobThatReturnsImmediately), new TestQueueClient(), new NullLogger());
- await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None));
+ await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None));
}
[Fact]
diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs
index c8dfbc2818..058601c674 100644
--- a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs
+++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs
@@ -43,151 +43,127 @@ public GetImportRequestHandlerTests()
}
[Fact]
- public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobWithCompletedStatus_ThenHttpResponseCodeShouldBeOk()
+ public async Task WhenGettingCompletedJob_ThenResponseCodeShouldBeOk()
{
- var coordResult = new ImportOrchestratorJobResult()
- {
- Request = "Request",
- };
-
- var orchestratorJob = new JobInfo()
- {
- Id = 0,
- GroupId = 0,
- Status = JobStatus.Completed,
- Result = JsonConvert.SerializeObject(coordResult),
- Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()),
- };
+ var coordResult = new ImportOrchestratorJobResult() { Request = "Request" };
+ var coord = new JobInfo() { Status = JobStatus.Completed, Result = JsonConvert.SerializeObject(coordResult), Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()) };
+ var workerResult = new ImportProcessingJobResult() { SucceededResources = 1, FailedResources = 1, ErrorLogLocation = "http://xyz" };
+ var worker = new JobInfo() { Id = 1, Status = JobStatus.Completed, Result = JsonConvert.SerializeObject(workerResult), Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz" }) };
- var processingJobResult = new ImportProcessingJobResult()
- {
- SucceededResources = 1,
- FailedResources = 1,
- ErrorLogLocation = "http://ResourceErrorLogLocation",
- };
-
- var processingJob = new JobInfo()
- {
- Id = 1,
- GroupId = 0,
- Status = JobStatus.Completed,
- Result = JsonConvert.SerializeObject(processingJobResult),
- Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }),
- };
-
- GetImportResponse result = await SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List() { processingJob });
+ var result = await SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker]);
Assert.Equal(HttpStatusCode.OK, result.StatusCode);
Assert.Single(result.JobResult.Output);
Assert.Single(result.JobResult.Error);
}
+ [Theory]
+ [InlineData(HttpStatusCode.BadRequest)]
+ [InlineData(HttpStatusCode.InternalServerError)]
+ [InlineData((HttpStatusCode)0)]
+ public async Task WhenGettingFailedJob_ThenExceptionIsThrownWithCorrectResponseCode(HttpStatusCode statusCode)
+ {
+ var coord = new JobInfo() { Status = JobStatus.Completed };
+ var workerResult = new ImportJobErrorResult() { ErrorMessage = "Error", HttpStatusCode = statusCode };
+ var worker = new JobInfo() { Id = 1, Status = JobStatus.Failed, Result = JsonConvert.SerializeObject(workerResult) };
+
+ var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker]));
+
+ Assert.Equal(statusCode == 0 ? HttpStatusCode.InternalServerError : statusCode, ofe.ResponseStatusCode);
+ Assert.Equal(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, ofe.ResponseStatusCode == HttpStatusCode.InternalServerError ? HttpStatusCode.InternalServerError : "Error"), ofe.Message);
+ }
+
[Fact]
- public async Task GivenAFhirMediator_WhenGettingAnCompletedImportJobWithFailure_ThenHttpResponseCodeShouldBeExpected()
+ public async Task WhenGettingFailedJob_WithGenericException_ThenExceptionIsThrownWithCorrectResponseCode()
{
- var orchestratorJobResult = new ImportOrchestratorJobErrorResult()
- {
- HttpStatusCode = HttpStatusCode.BadRequest,
- ErrorMessage = "error",
- };
+ var coord = new JobInfo() { Status = JobStatus.Completed };
+ object workerResult = new { message = "Error", stackTrace = "Trace" };
+ var worker = new JobInfo() { Id = 1, Status = JobStatus.Failed, Result = JsonConvert.SerializeObject(workerResult) };
- var orchestratorJob = new JobInfo()
- {
- Status = JobStatus.Failed,
- Result = JsonConvert.SerializeObject(orchestratorJobResult),
- };
+ var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker]));
- OperationFailedException ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List()));
+ Assert.Equal(HttpStatusCode.InternalServerError, ofe.ResponseStatusCode);
+ Assert.Equal(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, HttpStatusCode.InternalServerError), ofe.Message);
+ }
+ [Fact]
+ public async Task WhenGettingImportWithCancelledOrchestratorJob_ThenExceptionIsThrownWithBadResponseCode()
+ {
+ var coord = new JobInfo() { Status = JobStatus.Cancelled };
+ var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, []));
Assert.Equal(HttpStatusCode.BadRequest, ofe.ResponseStatusCode);
- Assert.NotNull(ofe.Message);
}
[Fact]
- public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobThatWasCanceled_ThenOperationFailedExceptionIsThrownWithBadRequestHttpResponseCode()
+ public async Task WhenGettingImportWithCancelledWorkerJob_ThenExceptionIsThrownWithBadResponseCode()
{
- var orchestratorJob = new JobInfo()
- {
- Status = JobStatus.Cancelled,
- };
- OperationFailedException ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List()));
-
+ var coord = new JobInfo() { Status = JobStatus.Completed };
+ var worker = new JobInfo() { Id = 1, Status = JobStatus.Cancelled };
+ var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker]));
Assert.Equal(HttpStatusCode.BadRequest, ofe.ResponseStatusCode);
}
[Fact]
- public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobWithNotCompletedStatus_ThenHttpResponseCodeShouldBeAccepted()
+ public async Task WhenGettingInFlightJob_ThenResponseCodeShouldBeAccepted()
{
- var orchestratorJobResult = new ImportOrchestratorJobResult()
- {
- Request = "Request",
- };
+ var coordResult = new ImportOrchestratorJobResult() { Request = "Request" };
+ var coord = new JobInfo() { Status = JobStatus.Completed, Result = JsonConvert.SerializeObject(coordResult), Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()) };
- var orchestratorJob = new JobInfo()
- {
- Id = 1,
- GroupId = 1,
- Status = JobStatus.Running,
- Result = JsonConvert.SerializeObject(orchestratorJobResult),
- Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()),
- };
+ var workerResult = new ImportProcessingJobResult() { SucceededResources = 1, FailedResources = 1, ErrorLogLocation = "http://xyz" };
- var processingJobResult = new ImportProcessingJobResult()
+ // jobs 1 and 2 are created for the same input file, so they are grouped together in the results
+ var worker1 = new JobInfo()
{
- SucceededResources = 1,
- FailedResources = 1,
- ErrorLogLocation = "http://ResourceErrorLogLocation",
+ Id = 1,
+ Status = JobStatus.Completed,
+ Result = JsonConvert.SerializeObject(workerResult),
+ Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz" }),
};
- var processingJob1 = new JobInfo()
+ var worker2 = new JobInfo()
{
Id = 2,
- GroupId = 1,
Status = JobStatus.Completed,
- Result = JsonConvert.SerializeObject(processingJobResult),
- Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }),
+ Result = JsonConvert.SerializeObject(workerResult),
+ Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz" }),
};
- var processingJob2 = new JobInfo()
+ var worker3 = new JobInfo()
{
Id = 3,
- GroupId = 1,
Status = JobStatus.Completed,
- Result = JsonConvert.SerializeObject(processingJobResult),
- Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }),
+ Result = JsonConvert.SerializeObject(workerResult),
+ Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz2" }),
};
- var processingJob3 = new JobInfo()
+ var worker4 = new JobInfo()
{
Id = 4,
- GroupId = 1,
Status = JobStatus.Running,
- Result = JsonConvert.SerializeObject(processingJobResult),
- Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }),
};
- GetImportResponse result = await SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List() { processingJob1, processingJob2, processingJob3 });
+ var result = await SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker1, worker2, worker3, worker4]);
Assert.Equal(HttpStatusCode.Accepted, result.StatusCode);
Assert.Equal(2, result.JobResult.Output.Count);
- Assert.Equal(2, result.JobResult.Error.Count);
+ Assert.Equal(3, result.JobResult.Error.Count);
}
[Fact]
- public async Task GivenAFhirMediator_WhenGettingWithNotExistJob_ThenNotFoundShouldBeReturned()
+ public async Task WhenGettingANonExistentJob_ThenNotFoundShouldBeReturned()
{
await Assert.ThrowsAsync(async () => await _mediator.GetImportStatusAsync(1, CancellationToken.None));
}
- private async Task SetupAndExecuteGetBulkImportJobByIdAsync(JobInfo orchestratorJobInfo, List processingJobInfos)
+ private async Task SetupAndExecuteGetBulkImportJobByIdAsync(JobInfo coord, List workers)
{
- _queueClient.GetJobByIdAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(orchestratorJobInfo);
+ _queueClient.GetJobByIdAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(coord);
- var allJobs = new List(processingJobInfos);
- allJobs.Add(orchestratorJobInfo);
+ var allJobs = new List(workers);
+ allJobs.Add(coord);
_queueClient.GetJobByGroupIdAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(allJobs);
- return await _mediator.GetImportStatusAsync(orchestratorJobInfo.Id, CancellationToken.None);
+ return await _mediator.GetImportStatusAsync(coord.Id, CancellationToken.None);
}
}
}
diff --git a/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs b/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs
index 490e5b61c6..0efcb728c4 100644
--- a/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs
+++ b/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs
@@ -16,9 +16,9 @@ public class ExportJobFormatConfiguration
///
/// The format definition string. An export job's format is used to create the folder stucture inside the container.
/// The format is defined by tags and characters. Supported tags are defined below. The / character is used to indicate a subfolder.
- /// - Places a timestamp corisponding to the time the export job was enqueued.
- /// - The name of the resource currently being exported.
- /// - The GUID id of the export job.
+ /// <timestamp> - Places a timestamp corresponding to the time the export job was enqueued.
+ /// <resourcename> - The name of the resource currently being exported.
+ /// <id> - The GUID id of the export job.
///
public string Format { get; set; }
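For readability, here is a hedged sketch of what a format value using the tags documented above could look like; the format string itself is an arbitrary example, and assigning it directly in code (rather than binding it from appSettings) is purely illustrative.

```csharp
// Illustrative only: combines a constant, '/' folder breaks, and the three supported tags.
using Microsoft.Health.Fhir.Core.Configs;

var exportFormat = new ExportJobFormatConfiguration
{
    Format = "fhir-exports/<timestamp>/<resourcename>-<id>",
};
```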
diff --git a/src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs b/src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs
new file mode 100644
index 0000000000..1068c45522
--- /dev/null
+++ b/src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs
@@ -0,0 +1,22 @@
+// -------------------------------------------------------------------------------------------------
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
+// -------------------------------------------------------------------------------------------------
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.Health.Abstractions.Exceptions;
+
+namespace Microsoft.Health.Fhir.Core.Exceptions
+{
+ public class IncompleteDeleteException : RequestTooCostlyException
+ {
+ public IncompleteDeleteException(int numberOfResourceVersionsDeleted)
+ : base(message: string.Format(Resources.PartialDeleteSuccess, numberOfResourceVersionsDeleted))
+ {
+ }
+ }
+}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs
index 7bd5837fe7..a92c531d70 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs
@@ -87,7 +87,8 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel
definition.DeleteOperation,
maxDeleteCount: null,
deleteAll: true,
- versionType: definition.VersionType),
+ versionType: definition.VersionType,
+ allowPartialSuccess: false), // Explicitly set to call out that this can change in the future if we want it to. Bulk delete offers the possibility of automatically rerunning the operation until it succeeds, fully automating the process.
cancellationToken);
}
catch (IncompleteOperationException ex)
@@ -103,9 +104,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel
if (exception != null)
{
- var jobException = new JobExecutionException($"Exception encounted while deleting resources: {result.Issues.First()}", result, exception);
- jobException.RequestCancellationOnFailure = true;
- throw jobException;
+ throw new JobExecutionException($"Exception encountered while deleting resources: {result.Issues.First()}", result, exception);
}
if (types.Count > 1)
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs
index 9eb4ad86e5..9e348eef40 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs
@@ -64,19 +64,16 @@ public Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancellation
return JsonConvert.SerializeObject(record);
case OperationStatus.Failed:
- var exception = new JobExecutionException(record.FailureDetails.FailureReason, record);
- exception.RequestCancellationOnFailure = true;
- throw exception;
+ throw new JobExecutionException(record.FailureDetails.FailureReason, record);
case OperationStatus.Canceled:
- // This throws a RetriableJobException so the job handler doesn't change the job status. The job will not be retried as cancelled jobs are ignored.
- throw new RetriableJobException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job cancelled.");
+ throw new OperationCanceledException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job cancelled.");
case OperationStatus.Queued:
case OperationStatus.Running:
- throw new RetriableJobException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job finished in non-terminal state. See logs from ExportJobTask.");
+ // If the code works as designed, this exception shouldn't be reached
+ throw new JobExecutionException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job finished in non-terminal state. See logs from ExportJobTask.", record);
default:
-#pragma warning disable CA2201 // Do not raise reserved exception types. This exception shouldn't be reached, but a switch statement needs a default condition. Nothing really fits here.
- throw new Exception($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Job status not set.");
-#pragma warning restore CA2201 // Do not raise reserved exception types
+ // If the code works as designed, this exception shouldn't be reached
+ throw new JobExecutionException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Job status not set.");
}
},
cancellationToken,
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs
index a390ae497d..3904d074c4 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs
@@ -12,10 +12,10 @@ namespace Microsoft.Health.Fhir.Core.Features.Operations.Export.Models
{
///
/// A custom converter for de-serializing the Output property in ExportJobRecord correctly.
- /// In SchemaVersion v1 for EJR, Output is of Dictionary format.
- /// In SchemaVersion v2 it is of Dictionary> format.
+ /// In SchemaVersion v1 for EJR, Output is of Dictionary<string, ExportFileInfo> format.
+ /// In SchemaVersion v2 it is of Dictionary<string, List<ExportFileInfo>> format.
/// This converter makes sure the updated code can still read v1 by returning a
- /// List always.
+ /// List<ExportFileInfo> always.
///
public class ExportJobRecordOutputConverter : JsonConverter
{
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs
index 82c7bd2bff..a08a147300 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs
@@ -45,96 +45,90 @@ public async Task Handle(GetImportRequest request, Cancellati
throw new UnauthorizedFhirActionException();
}
- JobInfo coordInfo = await _queueClient.GetJobByIdAsync(QueueType.Import, request.JobId, false, cancellationToken);
- if (coordInfo == null || coordInfo.Status == JobStatus.Archived)
+ var coord = await _queueClient.GetJobByIdAsync(QueueType.Import, request.JobId, false, cancellationToken);
+ if (coord == null || coord.Status == JobStatus.Archived)
{
throw new ResourceNotFoundException(string.Format(Core.Resources.ImportJobNotFound, request.JobId));
}
-
- if (coordInfo.Status == JobStatus.Created)
+ else if (coord.Status == JobStatus.Created || coord.Status == JobStatus.Running)
{
return new GetImportResponse(HttpStatusCode.Accepted);
}
- else if (coordInfo.Status == JobStatus.Running)
+ else if (coord.Status == JobStatus.Cancelled)
{
- if (string.IsNullOrEmpty(coordInfo.Result))
- {
- return new GetImportResponse(HttpStatusCode.Accepted);
- }
-
- ImportOrchestratorJobResult orchestratorJobResult = JsonConvert.DeserializeObject(coordInfo.Result);
-
- (List completedOperationOutcome, List failedOperationOutcome)
- = await GetProcessingResultAsync(coordInfo.GroupId, cancellationToken);
-
- var result = new ImportJobResult()
- {
- Request = orchestratorJobResult.Request,
- TransactionTime = coordInfo.CreateDate,
- Output = completedOperationOutcome,
- Error = failedOperationOutcome,
- };
-
- return new GetImportResponse(HttpStatusCode.Accepted, result);
+ throw new OperationFailedException(Core.Resources.UserRequestedCancellation, HttpStatusCode.BadRequest);
}
- else if (coordInfo.Status == JobStatus.Completed)
+ else if (coord.Status == JobStatus.Failed)
{
- ImportOrchestratorJobResult orchestratorJobResult = JsonConvert.DeserializeObject(coordInfo.Result);
-
- (List completedOperationOutcome, List failedOperationOutcome)
- = await GetProcessingResultAsync(coordInfo.GroupId, cancellationToken);
-
- var result = new ImportJobResult()
+ var errorResult = JsonConvert.DeserializeObject(coord.Result);
+ if (errorResult.HttpStatusCode == 0)
{
- Request = orchestratorJobResult.Request,
- TransactionTime = coordInfo.CreateDate,
- Output = completedOperationOutcome,
- Error = failedOperationOutcome,
- };
-
- return new GetImportResponse(HttpStatusCode.OK, result);
- }
- else if (coordInfo.Status == JobStatus.Failed)
- {
- ImportOrchestratorJobErrorResult errorResult = JsonConvert.DeserializeObject(coordInfo.Result);
-
- string failureReason = errorResult.ErrorMessage;
- HttpStatusCode failureStatusCode = errorResult.HttpStatusCode;
+ errorResult.HttpStatusCode = HttpStatusCode.InternalServerError;
+ }
- throw new OperationFailedException(
- string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), failureStatusCode);
+ // hide error message for InternalServerError
+ var failureReason = errorResult.HttpStatusCode == HttpStatusCode.InternalServerError ? HttpStatusCode.InternalServerError.ToString() : errorResult.ErrorMessage;
+ throw new OperationFailedException(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), errorResult.HttpStatusCode);
}
- else if (coordInfo.Status == JobStatus.Cancelled)
+ else if (coord.Status == JobStatus.Completed)
{
- throw new OperationFailedException(Core.Resources.UserRequestedCancellation, HttpStatusCode.BadRequest);
+ var start = Stopwatch.StartNew();
+ var jobs = (await _queueClient.GetJobByGroupIdAsync(QueueType.Import, coord.GroupId, true, cancellationToken)).Where(x => x.Id != coord.Id).ToList();
+ var results = GetProcessingResult(jobs);
+ await Task.Delay(TimeSpan.FromSeconds(start.Elapsed.TotalSeconds > 6 ? 60 : start.Elapsed.TotalSeconds * 10), cancellationToken); // throttle to avoid misuse.
+ var inFlightJobsExist = jobs.Any(x => x.Status == JobStatus.Running || x.Status == JobStatus.Created);
+ var cancelledJobsExist = jobs.Any(x => x.Status == JobStatus.Cancelled || (x.Status == JobStatus.Running && x.CancelRequested));
+ var failedJobsExist = jobs.Any(x => x.Status == JobStatus.Failed);
+
+ if (cancelledJobsExist && !failedJobsExist)
+ {
+ throw new OperationFailedException(Core.Resources.UserRequestedCancellation, HttpStatusCode.BadRequest);
+ }
+ else if (failedJobsExist)
+ {
+ var failed = jobs.First(x => x.Status == JobStatus.Failed);
+ var errorResult = JsonConvert.DeserializeObject(failed.Result);
+ if (errorResult.HttpStatusCode == 0)
+ {
+ errorResult.HttpStatusCode = HttpStatusCode.InternalServerError;
+ }
+
+ // hide error message for InternalServerError
+ var failureReason = errorResult.HttpStatusCode == HttpStatusCode.InternalServerError ? HttpStatusCode.InternalServerError.ToString() : errorResult.ErrorMessage;
+ throw new OperationFailedException(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), errorResult.HttpStatusCode);
+ }
+ else // no failures here
+ {
+ var coordResult = JsonConvert.DeserializeObject(coord.Result);
+ var result = new ImportJobResult() { Request = coordResult.Request, TransactionTime = coord.CreateDate, Output = results.Completed, Error = results.Failed };
+ return new GetImportResponse(!inFlightJobsExist ? HttpStatusCode.OK : HttpStatusCode.Accepted, result);
+ }
}
else
{
throw new OperationFailedException(Core.Resources.UnknownError, HttpStatusCode.InternalServerError);
}
- }
- private async Task<(List completedOperationOutcome, List failedOperationOutcome)> GetProcessingResultAsync(long groupId, CancellationToken cancellationToken)
- {
- var start = Stopwatch.StartNew();
- var jobs = await _queueClient.GetJobByGroupIdAsync(QueueType.Import, groupId, true, cancellationToken);
- var duration = start.Elapsed.TotalSeconds;
- var completedOperationOutcome = new List();
- var failedOperationOutcome = new List();
- foreach (var job in jobs.Where(_ => _.Id != groupId && _.Status == JobStatus.Completed)) // ignore coordinator && not completed
+ static (List Completed, List Failed) GetProcessingResult(IList jobs)
{
- var definition = JsonConvert.DeserializeObject(job.Definition);
- var result = JsonConvert.DeserializeObject(job.Result);
- completedOperationOutcome.Add(new ImportOperationOutcome() { Type = definition.ResourceType, Count = result.SucceededResources, InputUrl = new Uri(definition.ResourceLocation) });
- if (result.FailedResources > 0)
+ var completed = new List();
+ var failed = new List();
+ foreach (var job in jobs.Where(_ => _.Status == JobStatus.Completed))
{
- failedOperationOutcome.Add(new ImportFailedOperationOutcome() { Type = definition.ResourceType, Count = result.FailedResources, InputUrl = new Uri(definition.ResourceLocation), Url = result.ErrorLogLocation });
+ var definition = JsonConvert.DeserializeObject(job.Definition);
+ var result = JsonConvert.DeserializeObject(job.Result);
+ completed.Add(new ImportOperationOutcome() { Type = definition.ResourceType, Count = result.SucceededResources, InputUrl = new Uri(definition.ResourceLocation) });
+ if (result.FailedResources > 0)
+ {
+ failed.Add(new ImportFailedOperationOutcome() { Type = definition.ResourceType, Count = result.FailedResources, InputUrl = new Uri(definition.ResourceLocation), Url = result.ErrorLogLocation });
+ }
}
- }
- await Task.Delay(TimeSpan.FromSeconds(duration * 10), cancellationToken); // throttle to avoid misuse.
+ // group success results by url
+ var grouped = completed.GroupBy(o => o.InputUrl).Select(g => new ImportOperationOutcome() { Type = g.First().Type, Count = g.Sum(_ => _.Count), InputUrl = g.Key }).ToList();
- return (completedOperationOutcome, failedOperationOutcome);
+ return (grouped, failed);
+ }
}
}
}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs
index 48bec833c3..4a3133e341 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs
@@ -5,12 +5,14 @@
using System;
using System.IO;
+using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using EnsureThat;
using Microsoft.Extensions.Logging;
using Microsoft.Health.JobManagement;
using Microsoft.IO;
+using Polly;
namespace Microsoft.Health.Fhir.Core.Features.Operations.Import
{
@@ -20,6 +22,9 @@ public class ImportErrorStore : IImportErrorStore
private Uri _fileUri;
private RecyclableMemoryStreamManager _recyclableMemoryStreamManager;
private ILogger _logger;
+ private static readonly AsyncPolicy _retries = Policy
+ .Handle()
+ .WaitAndRetryAsync(3, _ => TimeSpan.FromMilliseconds(RandomNumberGenerator.GetInt32(1000, 5000)));
public ImportErrorStore(IIntegrationDataStoreClient integrationDataStoreClient, Uri fileUri, ILogger logger)
{
@@ -41,7 +46,6 @@ public ImportErrorStore(IIntegrationDataStoreClient integrationDataStoreClient,
///
/// New import errors
/// Cancellaltion Token
- [System.Diagnostics.CodeAnalysis.SuppressMessage("Reliability", "CA2016:Forward the 'CancellationToken' parameter to methods", Justification = ".NET 6/8 compat")]
public async Task UploadErrorsAsync(string[] importErrors, CancellationToken cancellationToken)
{
if (importErrors == null || importErrors.Length == 0)
@@ -49,7 +53,7 @@ public async Task UploadErrorsAsync(string[] importErrors, CancellationToken can
return;
}
- try
+ await _retries.ExecuteAsync(async () =>
{
using var stream = new RecyclableMemoryStream(_recyclableMemoryStreamManager, tag: nameof(ImportErrorStore));
using StreamWriter writer = new StreamWriter(stream);
@@ -64,13 +68,8 @@ public async Task UploadErrorsAsync(string[] importErrors, CancellationToken can
string blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray());
await _integrationDataStoreClient.UploadBlockAsync(_fileUri, stream, blockId, cancellationToken);
- await _integrationDataStoreClient.AppendCommitAsync(_fileUri, new string[] { blockId }, cancellationToken);
- }
- catch (Exception ex)
- {
- _logger.LogWarning(ex, "Failed to upload import error log.");
- throw new RetriableJobException(ex.Message, ex);
- }
+ await _integrationDataStoreClient.AppendCommitAsync(_fileUri, [blockId], cancellationToken);
+ });
}
}
}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs
deleted file mode 100644
index c278a4a0bb..0000000000
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-// -------------------------------------------------------------------------------------------------
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
-// -------------------------------------------------------------------------------------------------
-
-using System;
-using System.Diagnostics;
-
-namespace Microsoft.Health.Fhir.Core.Features.Operations.Import
-{
- public class ImportFileEtagNotMatchException : Exception
- {
- public ImportFileEtagNotMatchException(string message)
- : base(message, null)
- {
- }
-
- public ImportFileEtagNotMatchException(string message, Exception innerException)
- : base(message, innerException)
- {
- Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty.");
- }
- }
-}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorJobErrorResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportJobErrorResult.cs
similarity index 79%
rename from src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorJobErrorResult.cs
rename to src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportJobErrorResult.cs
index 34e5be28cd..f28b4f1ac2 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorJobErrorResult.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportJobErrorResult.cs
@@ -7,7 +7,7 @@
namespace Microsoft.Health.Fhir.Core.Features.Operations.Import
{
- public class ImportOrchestratorJobErrorResult
+ public class ImportJobErrorResult
{
///
/// Err http status code
@@ -19,11 +19,6 @@ public class ImportOrchestratorJobErrorResult
///
public string ErrorMessage { get; set; }
- ///
- /// Inner error if there're multiple errors
- ///
- public ImportOrchestratorJobErrorResult InnerError { get; set; }
-
///
/// Details
///
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs
deleted file mode 100644
index 1862f9fbbe..0000000000
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-// -------------------------------------------------------------------------------------------------
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
-// -------------------------------------------------------------------------------------------------
-
-using System;
-using System.Diagnostics;
-
-namespace Microsoft.Health.Fhir.Core.Features.Operations.Import
-{
- public class ImportProcessingException : Exception
- {
- public ImportProcessingException(string message)
- : this(message, null)
- {
- }
-
- public ImportProcessingException(string message, Exception innerException)
- : base(message, innerException)
- {
- Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty.");
- }
- }
-}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs
deleted file mode 100644
index 19d7345608..0000000000
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-// -------------------------------------------------------------------------------------------------
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
-// -------------------------------------------------------------------------------------------------
-
-namespace Microsoft.Health.Fhir.Core.Features.Operations.Import
-{
- public class ImportProcessingJobErrorResult
- {
- public string Message { get; set; }
-
- public string Details { get; set; }
- }
-}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs
index 81a86ba85b..1bd337687f 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs
@@ -31,10 +31,5 @@ public class ImportProcessingJobResult
/// Critical error during data processing.
///
public string ErrorDetails { get; set; }
-
- ///
- /// Current index for last checkpoint
- ///
- public long CurrentIndex { get; set; }
}
}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs
index 5a8cb72a3b..f93630a6d0 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs
@@ -13,7 +13,7 @@
namespace Microsoft.Health.Fhir.Core.Features.Operations.Reindex.Models
{
///
- /// JsonConverter to handle from the legacy version with ‹string, int› to the current version with ‹string, SearchResultReindex›.
+ /// JsonConverter to handle from the legacy version with <string, int> to the current version with <string, SearchResultReindex>.
///
public class ReindexJobQueryResourceCountsConverter : JsonConverter>
{
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs
index 905e45c24b..4fecca60ed 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs
@@ -12,7 +12,7 @@
namespace Microsoft.Health.Fhir.Core.Features.Operations.Reindex.Models
{
///
- /// JsonConverter to handle change from ConcurrentBag to ConcurrentDictionary.
+ /// JsonConverter to handle change from ConcurrentBag to ConcurrentDictionary.
/// For backcompat and fact what we don't need values in dictionary and only key uniqueness, we read and write it as array and not dictionary.
///
public class ReindexJobQueryStatusConverter : JsonConverter>
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs
index decb4b1455..88983ff9bc 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs
@@ -802,7 +802,7 @@ private async Task CalculateAndSetTotalAndResourceCounts()
///
/// Gets called from and only gets called when all queryList items are status of completed
///
- /// Task<(int totalCount, List
+ /// Count and resource types.
private async Task<(int totalCount, List resourcesTypes)> CalculateTotalCount()
{
int totalCount = 0;
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs b/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs
index 165cdb49c2..732b4bf1db 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs
@@ -21,7 +21,15 @@ public interface IFhirDataStore
Task GetAsync(ResourceKey key, CancellationToken cancellationToken);
- Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, CancellationToken cancellationToken);
+ ///
+ /// Hard deletes a resource.
+ ///
+ /// Identifier of the resource
+ /// Keeps the current version of the resource, only deleting history
+ /// Only for Cosmos. Allows for a delete to partially succeed if it fails to delete all versions of a resource in one try.
+ /// Cancellation Token
+ /// Async Task
+ Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, bool allowPartialSuccess, CancellationToken cancellationToken);
Task BulkUpdateSearchParameterIndicesAsync(IReadOnlyCollection resources, CancellationToken cancellationToken);
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs b/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs
index 0f8a6b4fcd..8ce65abe81 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs
@@ -40,7 +40,6 @@ public class ResourceWrapperFactory : IResourceWrapperFactory
/// The compartment indexer.
/// The search parameter definition manager.
/// Resource deserializer
- /// Resource id provider
public ResourceWrapperFactory(
IRawResourceFactory rawResourceFactory,
RequestContextAccessor fhirRequestContextAccessor,
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs b/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs
index 4bc27e0f9e..d10728fcc6 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs
@@ -74,7 +74,7 @@ public static MultiaryExpression And(IReadOnlyList expressions)
/// The target resource type.
/// If this is a reversed chained expression.
/// The expression.
- /// A that represents chained operation on through .
+ /// A that represents chained operation on through .
public static ChainedExpression Chained(string[] resourceTypes, SearchParameterInfo referenceSearchParameter, string[] targetResourceTypes, bool reversed, Expression expression)
{
return new ChainedExpression(resourceTypes, referenceSearchParameter, targetResourceTypes, reversed, expression);
@@ -91,7 +91,7 @@ public static ChainedExpression Chained(string[] resourceTypes, SearchParameterI
/// If this is a wildcard include.
/// If this is a reversed include (revinclude) expression.
/// If this is include has :iterate (:recurse) modifier.
- /// A that represents an include on through .
+ /// A that represents an include on through .
public static IncludeExpression Include(string[] resourceTypes, SearchParameterInfo referenceSearchParameter, string sourceResourceType, string targetResourceType, IEnumerable referencedTypes, bool wildCard, bool reversed, bool iterate)
{
return new IncludeExpression(resourceTypes, referenceSearchParameter, sourceResourceType, targetResourceType, referencedTypes, wildCard, reversed, iterate, null);
@@ -263,7 +263,7 @@ public static StringExpression StartsWith(FieldName fieldName, int? componentInd
}
///
- /// Creates a that represents logical IN operation over .
+ /// Creates a that represents logical IN operation over .
///
/// Type of the value included in the expression.
/// The field name.
@@ -318,14 +318,14 @@ public static SmartCompartmentSearchExpression SmartCompartmentSearch(string com
///
/// Accumulates a "value-insensitive" hash code of this instance, meaning it ignores parameterizable values.
- /// For example, date=2013&name=Smith and date=2014&name=Trudeau would have the same hash code.
+ /// For example, date=2013&name=Smith and date=2014&name=Trudeau would have the same hash code.
///
/// The HashCode instance to accumulate into
public abstract void AddValueInsensitiveHashCode(ref HashCode hashCode);
///
/// Determines whether the given expression is equal to this instance, ignoring any parameterizable values.
- /// For example, date=2013&name=Smith and date=2014&name=Trudeau would be considered equal
+ /// For example, date=2013&name=Smith and date=2014&name=Trudeau would be considered equal
///
/// The expression to compare this instance to.
public abstract bool ValueInsensitiveEquals(Expression other);
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs b/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs
index 0ac194cc46..4dae486f41 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs
@@ -177,13 +177,13 @@ private IEnumerable ProcessNonCompositeSearchParameter(SearchP
}
else
{
- ///
- /// searchValue should not have a null value
- /// But if the input json is not in the correct format then we are parsing the body here and passing the initial validations for required fields here
- /// e.g. If the body contains Coverage.status = "", then after parsing Coverage.status = null & Coverage.statusElement = null, resulting into minimum cardinality error as expected
- /// If the body contains Coverage.status = , then after parsing Coverage.status = null & Coverage.statusElement = {value=null}, which passes the Firely validation and CodeToTokenSearchValueConverter returns null
- /// In this case return BadRequestException with a valid message instead of 500
- ///
+ //
+ // searchValue should not have a null value
+ // But if the input json is not in the correct format then we are parsing the body here and passing the initial validations for required fields here
+ // e.g. If the body contains Coverage.status = "", then after parsing Coverage.status = null & Coverage.statusElement = null, resulting into minimum cardinality error as expected
+ // If the body contains Coverage.status = , then after parsing Coverage.status = null & Coverage.statusElement = {value=null}, which passes the Firely validation and CodeToTokenSearchValueConverter returns null
+ // In this case return BadRequestException with a valid message instead of 500
+ //
throw new BadRequestException(string.Format(Core.Resources.ValueCannotBeNull, searchParameter.Expression));
}
}
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs b/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs
index 68bc4ffc47..bc464ea729 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs
@@ -55,7 +55,6 @@ public FhirMemoryCache(string name, int limitSizeInMegabytes, TimeSpan expiratio
///
/// Get or add the value to cache.
///
- /// Type of the value in cache
/// Key
/// Value
/// Value in cache
@@ -118,7 +117,7 @@ public T Get(string key)
}
///
- /// Try to retrieve an item from cache, if it does not exist then returns the for that generic type.
+ /// Try to retrieve an item from cache, if it does not exist then returns the default for that generic type.
///
/// Key
/// Value
diff --git a/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs b/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs
index 1b1b7fbdfb..62738d4cbb 100644
--- a/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs
+++ b/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs
@@ -17,7 +17,6 @@ namespace Microsoft.Health.Fhir.Core.Features.Validation
///
/// Validates content of resource.
///
- /// The type of the element.
///
/// Even if we correctly parsed resource into object it doesn't mean resource is valid.
/// We need to check that properties have right cardinality, correct types, proper format, etc.
diff --git a/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs
index 4b13916925..91dd7abfad 100644
--- a/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs
+++ b/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs
@@ -58,7 +58,7 @@ public ConvertDataRequest(
///
/// Reference for template collection.
- /// The format is "/:" for template collection stored in container registries.
+ /// The format is "<registryServer>/<imageName>:<imageTag>" for template collection stored in container registries.
/// Also supports image digest as reference. Will use 'latest' if no tag or digest present.
///
public string TemplateCollectionReference { get; }
diff --git a/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs
index 433147cf1d..f3121cafc9 100644
--- a/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs
+++ b/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs
@@ -23,7 +23,8 @@ public ConditionalDeleteResourceRequest(
int? maxDeleteCount,
BundleResourceContext bundleResourceContext = null,
bool deleteAll = false,
- ResourceVersionType versionType = ResourceVersionType.Latest)
+ ResourceVersionType versionType = ResourceVersionType.Latest,
+ bool allowPartialSuccess = false)
: base(resourceType, conditionalParameters, bundleResourceContext)
{
EnsureArg.IsNotNull(conditionalParameters, nameof(conditionalParameters));
@@ -32,6 +33,7 @@ public ConditionalDeleteResourceRequest(
MaxDeleteCount = maxDeleteCount;
DeleteAll = deleteAll;
VersionType = versionType;
+ AllowPartialSuccess = allowPartialSuccess;
}
public DeleteOperation DeleteOperation { get; }
@@ -42,6 +44,8 @@ public ConditionalDeleteResourceRequest(
public ResourceVersionType VersionType { get; }
+ public bool AllowPartialSuccess { get; }
+
protected override IEnumerable GetCapabilities() => Capabilities;
}
}
diff --git a/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs
index 34518e4a8f..2acf50551f 100644
--- a/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs
+++ b/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs
@@ -14,16 +14,17 @@ namespace Microsoft.Health.Fhir.Core.Messages.Delete
{
public class DeleteResourceRequest : IRequest, IRequireCapability
{
- public DeleteResourceRequest(ResourceKey resourceKey, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null)
+ public DeleteResourceRequest(ResourceKey resourceKey, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null, bool allowPartialSuccess = false)
{
EnsureArg.IsNotNull(resourceKey, nameof(resourceKey));
ResourceKey = resourceKey;
DeleteOperation = deleteOperation;
BundleResourceContext = bundleResourceContext;
+ AllowPartialSuccess = allowPartialSuccess;
}
- public DeleteResourceRequest(string type, string id, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null)
+ public DeleteResourceRequest(string type, string id, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null, bool allowPartialSuccess = false)
{
EnsureArg.IsNotNull(type, nameof(type));
EnsureArg.IsNotNull(id, nameof(id));
@@ -31,6 +32,7 @@ public DeleteResourceRequest(string type, string id, DeleteOperation deleteOpera
ResourceKey = new ResourceKey(type, id);
DeleteOperation = deleteOperation;
BundleResourceContext = bundleResourceContext;
+ AllowPartialSuccess = allowPartialSuccess;
}
public ResourceKey ResourceKey { get; }
@@ -39,6 +41,8 @@ public DeleteResourceRequest(string type, string id, DeleteOperation deleteOpera
public DeleteOperation DeleteOperation { get; }
+ public bool AllowPartialSuccess { get; }
+
public IEnumerable<CapabilityQuery> RequiredCapabilities()
{
yield return new CapabilityQuery($"CapabilityStatement.rest.resource.where(type = '{ResourceKey.ResourceType}').interaction.where(code = 'delete').exists()");
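For context, a minimal construction sketch against the updated signature; the resource type and id are placeholders, and DeleteOperation.HardDelete is assumed to be one of the existing enum values.
// Hypothetical caller opting in to partial success for a hard delete.
var request = new DeleteResourceRequest(
    "Patient",
    "example-id",
    DeleteOperation.HardDelete,
    bundleResourceContext: null,
    allowPartialSuccess: true);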
diff --git a/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs
index 144a4a6c9e..cc8818d488 100644
--- a/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs
+++ b/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs
@@ -988,6 +988,15 @@ internal static string OrDelimiter {
}
}
+ /// <summary>
+ /// Looks up a localized string similar to Deleted {0} versions of the target resource..
+ /// </summary>
+ internal static string PartialDeleteSuccess {
+ get {
+ return ResourceManager.GetString("PartialDeleteSuccess", resourceCulture);
+ }
+ }
+
/// <summary>
/// Looks up a localized string similar to Patching immutable properties is not allowed..
/// </summary>
diff --git a/src/Microsoft.Health.Fhir.Core/Resources.resx b/src/Microsoft.Health.Fhir.Core/Resources.resx
index cf51079c07..7afb460c69 100644
--- a/src/Microsoft.Health.Fhir.Core/Resources.resx
+++ b/src/Microsoft.Health.Fhir.Core/Resources.resx
@@ -736,4 +736,8 @@
<value>A resource should only appear once in each Bundle.</value>
<comment>Error message for a duplicate resource key in the same bundle</comment>
</data>
+ <data name="PartialDeleteSuccess" xml:space="preserve">
+ <value>Deleted {0} versions of the target resource.</value>
+ <comment>{0} is replaced with the number of deleted versions of the resource.</comment>
+ </data>
</root>
\ No newline at end of file
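A small sketch of how the new PartialDeleteSuccess string could be surfaced; the variable name and surrounding context are assumptions, and only the format string comes from the resx entry above.
// Hypothetical usage: deletedVersionCount is whatever the delete handler reports.
int deletedVersionCount = 3;
string outcome = string.Format(Resources.PartialDeleteSuccess, deletedVersionCount);
// -> "Deleted 3 versions of the target resource."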
diff --git a/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs b/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs
index 4da108a6e6..ecbeba93df 100644
--- a/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs
+++ b/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs
@@ -11,9 +11,11 @@
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Threading;
+using System.Threading.Tasks;
using Hl7.Fhir.Model;
using Hl7.Fhir.Serialization;
using Microsoft.Azure.Cosmos;
+using Microsoft.Azure.Cosmos.Scripts;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
@@ -21,6 +23,7 @@
using Microsoft.Health.Core.Features.Context;
using Microsoft.Health.Extensions.DependencyInjection;
using Microsoft.Health.Fhir.Core.Configs;
+using Microsoft.Health.Fhir.Core.Exceptions;
using Microsoft.Health.Fhir.Core.Extensions;
using Microsoft.Health.Fhir.Core.Features.Context;
using Microsoft.Health.Fhir.Core.Features.Definition;
@@ -283,6 +286,23 @@ public async Task GivenAnUpsertDuringABatch_When408ExceptionOccurs_RetryWillHapp
await _container.Value.ReceivedWithAnyArgs(7).CreateItemAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any());
}
+ [Fact]
+ public async Task GivenAHardDeleteRequest_WhenPartiallySuccessful_ThenAnExceptionIsThrown()
+ {
+ var resourceKey = new ResourceKey(KnownResourceTypes.Patient, "test");
+
+ var scripts = Substitute.For<Scripts>();
+ scripts.ExecuteStoredProcedureAsync(Arg.Any(), Arg.Any(), Arg.Any